import os
import subprocess
import sys
import tempfile
import time
import traceback

import requests

import validationpath
import validationserver
from validationtestutil import check_execute
19 validation_url =
"http://localhost:8000/"
def start_webserver():
    """
    Start the validation server process, this will not
    return. Therefore this function must be started within
    a dedicated subprocess.
    """
    # NOTE(review): the function body was lost in the extraction; upstream
    # invokes the blocking server entry point directly -- confirm.
    validationserver.run_server()
def http_post(command, json_args):
    """
    POST a JSON payload to one REST endpoint of the validation server.

    :param command: endpoint name, appended to ``validation_url``
    :param json_args: dict sent as the JSON request body
    :return: the ``requests.Response`` on success, ``None`` on failure
    """
    call_url = validation_url + command
    print("Posting {} to {}".format(json_args, command))
    # NOTE(review): the keyword of the post() call and the failure branch
    # were lost in the extraction; reconstructed as json= / r.ok -- confirm.
    r = requests.post(call_url, json=json_args)
    if not r.ok:
        print("REST call {} with arguments {} failed".format(
            call_url, json_args))
        return None
    return r
def check_for_plotting(revs, tmp_folder):
    """
    Checks if creating new plots for a revision combination works
    :param revs: List of revisions
    :param tmp_folder: Temporary folder
    :return: True if the comparison plots were created, False otherwise
    """
    print("Trying to recreate plots for revisions {}".format(revs))

    res = http_post("create_comparison", {"revision_list": revs})
    if not res:
        return False

    # the server answers with a key that can be polled for progress
    prog_key = res.json()["progress_key"]

    # poll the server until the comparison is reported complete, or give
    # up after max_wait_time seconds
    done = False
    wait_time = 0.1  # seconds between polls
    max_wait_time = 3
    summed_wait_time = 0

    while not done:
        res = http_post("check_comparison_status", {"input": prog_key})
        if not res:
            return False

        if res.json() and res.json()["status"] == "complete":
            done = True

        time.sleep(wait_time)
        summed_wait_time += wait_time
        if summed_wait_time > max_wait_time:
            print("Waited for {} seconds for the requested plots to complete "
                  "and nothing happened".format(summed_wait_time))
            return False

    # NOTE(review): the lines computing comp_folder/comp_json were lost in
    # the extraction; reconstructed from the upstream validationpath
    # helpers -- confirm the helper names.
    comp_folder = validationpath.get_html_plots_tag_comparison_folder(
        tmp_folder, revs)
    comp_json = validationpath.get_html_plots_tag_comparison_json(
        tmp_folder, revs)

    if not os.path.exists(comp_folder):
        print("Comparison folder {} does not exist".format(comp_folder))
        return False
    if not os.path.isfile(comp_json):
        print("Comparison json {} does not exist".format(comp_json))
        return False

    # spot-check one known plot file inside the comparison folder
    some_plot = os.path.join(
        comp_folder,
        "validationTestPlotsB_gaus_histogram.pdf")
    if not os.path.isfile(some_plot):
        print("Comparison plot {} does not exist".format(some_plot))
        return False

    print("Comparison properly created")
    return True
def check_for_content(revs, min_matrix_plots, min_plot_objects):
    """
    Checks for the expected content on the validation website

    :param revs: revision labels expected to be listed on the website
    :param min_matrix_plots: minimal number of overview/matrix plots expected
    :param min_plot_objects: minimal number of plot objects expected
    :return: True if all checks pass, False otherwise
    """
    try:
        import splinter
    except ImportError:
        print("The splinter package is required to run this test. Run 'pip3 "
              "install splinter' to install")
        return False

    # the context manager closes the browser once the checks are done
    with splinter.Browser() as browser:
        url = validation_url + "static/validation.html"
        # bug fix: the original passed url as an extra print() argument
        # instead of formatting it into the message
        print("Opening {} to perform checks".format(url))
        browser.visit(url)

        if len(browser.title) == 0:
            print("Validation website cannot be loaded")
            return False

        found_revs = browser.find_by_css(".revision-label")

        for r in revs:
            # each expected revision must appear exactly once in the list
            # NOTE(review): the guard condition was lost in the extraction;
            # reconstructed as len(rr) != 1 -- confirm against upstream.
            rr = [web_r for web_r in found_revs if web_r.value == r]
            if len(rr) != 1:
                # bug fix: "Revsion" typo in the user-facing message
                print("Revision {} was not found on validation website. It "
                      "should be there.".format(r))
                return False

        plot_objects = browser.find_by_css(".object")

        # bug fix: message was never formatted (extra print() argument)
        print("Checking for a minimum number of {} plot objects".format(
            min_plot_objects))
        if len(plot_objects) < min_plot_objects:
            print("Only {} plots found, while {} are expected".format(
                len(plot_objects), min_plot_objects))
            return False

        # enable the overview so the plot matrix gets rendered
        checkbox_overview = browser.find_by_id("check_show_overview")
        checkbox_overview.check()

        # NOTE(review): the remainder of this function (verifying that at
        # least min_matrix_plots overview plots appear) was lost in the
        # extraction; reconstructed -- confirm against upstream.
        time.sleep(2)
        plot_matrices = browser.find_by_css(".plot_matrix_item")
        print("Checking for a minimum number of {} matrix plots".format(
            min_matrix_plots))
        if len(plot_matrices) < min_matrix_plots:
            print("Only {} matrix plots found, while {} are expected".format(
                len(plot_matrices), min_matrix_plots))
            return False

    return True
# NOTE(review): the "def" line of this function was lost in the extraction;
# upstream names the test driver main() -- confirm.
def main():
    """
    Runs two test validations, starts the web server and queries data
    """
    # NOTE(review): the original guard condition was lost in the extraction;
    # upstream skips this test on the central build system -- confirm the
    # environment variable.
    if os.environ.get("BELLE2_IS_CI", False):
        print("TEST SKIPPED: Not properly runnable on build bot",
              file=sys.stderr)
        sys.exit(1)

    # the website checks need splinter; skip (not fail) when it is missing
    try:
        import splinter  # noqa
    except ImportError:
        # bug fix: the concatenated message lacked a space between the
        # sentences ("...this test.Run 'pip3...")
        print("TEST SKIPPED: The splinter package is required to run this test. " +
              "Run 'pip3 install splinter' to install", file=sys.stderr)
        sys.exit(1)

    success = True

    revs_to_gen = ["stack_test_1", "stack_test_2", "stack_test_3"]

    with tempfile.TemporaryDirectory() as tmpdir:
        # run the test validations inside the temporary folder
        os.chdir(str(tmpdir))

        for r in revs_to_gen:
            check_execute("validate_basf2 --test --tag {}".format(r))

        # bug fix: initialize so the finally block cannot hit an unbound
        # name if Popen itself raises
        server_process = None
        try:
            # serve json output files, plots and the interactive website
            server_process = subprocess.Popen(["run_validation_server"])

            # NOTE(review): exact startup wait lost in the extraction;
            # give the server a moment to come up -- confirm duration.
            time.sleep(2)

            # "reference" is served in addition to the generated tags
            success = success and \
                check_for_content(revs_to_gen + ["reference"], 7, 7)

            # check if the plotting works
            success = success and check_for_plotting(revs_to_gen[:-1],
                                                     str(tmpdir))
        except BaseException:
            # catch everything so the finally block can still terminate
            # the webserver process properly
            e = sys.exc_info()[0]
            print("Error {}".format(e))
            print(traceback.format_exc())
            success = False
        finally:
            if server_process is not None:
                # shut the webserver down and wait for it to exit
                server_process.terminate()
                server_process.wait()

    if not success:
        sys.exit(1)
# Script entry point: run the full-stack validation test.
if __name__ == "__main__":
    main()