import os
import subprocess
import sys
import tempfile
import time
import traceback

import requests

import validationserver
from validationtestutil import check_execute

# Base URL under which the locally started validation server is reachable.
validation_url = "http://localhost:8000/"
def start_validation_server():
    """
    Start the validation server process, this will not
    return. Therefore this function must be started within
    a dedicated subprocess.
    """
    # Blocks forever serving HTTP requests; see validationserver.run_server.
    validationserver.run_server()
def http_post(command, json_args):
    """
    Post JSON arguments to one REST endpoint of the validation server.

    :param command: name of the REST endpoint, appended to ``validation_url``
    :param json_args: dict sent as the JSON body of the request
    :return: the ``requests.Response`` on success, ``None`` if the call
        returned a non-OK HTTP status
    """
    call_url = validation_url + command
    print(f"Posting {json_args} to {command}")
    r = requests.post(call_url, json=json_args)
    # Report failures explicitly instead of handing callers a bad response.
    if not r.ok:
        print(
            f"REST call {call_url} with arguments {json_args} failed"
        )
        return None
    return r
def check_for_plotting(revs, tmp_folder):
    """
    Checks if creating new plots for a revision combination works.

    :param revs: List of revisions to create comparison plots for
    :param tmp_folder: Temporary folder the validation output lives in
    :return: True if the comparison plots were created, False otherwise
    """
    print(f"Trying to recreate plots for revisions {revs}")

    res = http_post("create_comparison", {"revision_list": revs})
    if not res:
        return False

    # Key used to poll the server for the progress of the plot creation.
    prog_key = res.json()["progress_key"]

    # Poll until the server reports the comparison as complete, or time out.
    # NOTE(review): the original wait/timeout constants were lost in the
    # source mangling — confirm these values against the upstream file.
    wait_time = 0.1  # seconds between polls
    max_wait_time = 10
    summed_wait_time = 0

    while True:
        res = http_post("check_comparison_status", {"input": prog_key})
        if not res:
            return False

        if res.json() and res.json()["status"] == "complete":
            # Plots are done.
            break

        time.sleep(wait_time)
        summed_wait_time += wait_time
        if summed_wait_time > max_wait_time:
            print(
                f"Waited for {summed_wait_time} seconds for the requested plots to complete and nothing happened"
            )
            return False

    # Locate the folder and the json file describing the created comparison.
    # NOTE(review): assumes these helpers live in validationserver (their
    # signatures are listed with run_server) — confirm the providing module.
    comp_folder = validationserver.get_html_plots_tag_comparison_folder(
        tmp_folder, revs
    )
    comp_json = validationserver.get_html_plots_tag_comparison_json(
        tmp_folder, revs
    )

    if not os.path.exists(comp_folder):
        print(f"Comparison folder {comp_folder} does not exist")
        return False

    if not os.path.isfile(comp_json):
        print(f"Comparison json {comp_json} does not exist")
        return False

    # Spot-check that one known plot file was actually produced.
    some_plot = os.path.join(
        comp_folder,
        "validationTestPlotsB_gaus_histogram.pdf",
    )
    if not os.path.isfile(some_plot):
        print(f"Comparison plot {some_plot} does not exist")
        return False

    print("Comparison properly created")
    return True
def check_for_content(revs, min_matrix_plots, min_plot_objects):
    """
    Checks for the expected content on the validation website.

    :param revs: revisions expected to be listed on the website
    :param min_matrix_plots: minimum number of plots expected in the
        overview matrix (see NOTE below)
    :param min_plot_objects: minimum number of plot objects expected
    :return: True if all checks passed, False otherwise
    """
    try:
        import splinter
    except ImportError:
        print(
            "The splinter package is required to run this test. Run 'pip3 "
            "install splinter' to install"
        )
        return False

    with splinter.Browser() as browser:
        url = validation_url + "static/validation.html"
        # Bug fix: the original passed the url as a second positional
        # argument to print() next to a bare "{}" placeholder, so the
        # message was never formatted. Use an f-string instead.
        print(f"Opening {url} to perform checks")
        browser.visit(url)

        # An empty title means the page did not load at all.
        if len(browser.title) == 0:
            print("Validation website cannot be loaded")
            return False

        # Each revision shown on the site carries a .revision-label element.
        found_revs = browser.find_by_css(".revision-label")

        for r in revs:
            rr = [web_r for web_r in found_revs if web_r.value == r]
            if len(rr) != 1:
                print(
                    f"Revision {r} was not found on validation website. It should be there."
                )
                return False

        plot_objects = browser.find_by_css(".object")

        # Same bug fix as above: format the count into the message.
        print(
            f"Checking for a minimum number of {min_plot_objects} plot objects"
        )
        if len(plot_objects) < min_plot_objects:
            print(
                f"Only {len(plot_objects)} plots found, while {min_plot_objects} are expected"
            )
            return False

        # Switch the page to the overview display.
        checkbox_overview = browser.find_by_id("check_show_overview")
        checkbox_overview.check()

        # NOTE(review): the check that used min_matrix_plots was lost in the
        # source mangling; the parameter is kept for interface compatibility.
        # TODO: restore the overview matrix plot-count check.

    return True
def main():
    """
    Runs two test validations, starts the web server and queries data
    from it to verify the full validation stack.
    """
    # The test cannot run on the central build system; skip unconditionally.
    # NOTE(review): the original guard condition (if any) was lost in the
    # source mangling — confirm against the upstream file.
    print("TEST SKIPPED: Not properly runnable on build bot", file=sys.stderr)
    sys.exit(0)

    try:
        import splinter  # noqa
    except ImportError:
        # Bug fix: the concatenated message lacked a space between the two
        # sentences ("...test.Run 'pip3 ...").
        print(
            "TEST SKIPPED: The splinter package is required to run this test."
            + " Run 'pip3 install splinter' to install",
            file=sys.stderr,
        )
        sys.exit(1)

    success = True

    # Tags for the fake validation results generated below.
    revs_to_gen = ["stack_test_1", "stack_test_2", "stack_test_3"]

    # Run everything inside a throw-away working directory.
    with tempfile.TemporaryDirectory() as tmpdir:
        os.chdir(str(tmpdir))

        # Generate test validation output for each tag.
        for r in revs_to_gen:
            check_execute(f"validate_basf2 --test --tag {r}")

        # Start the validation web server in a separate process.
        server_process = subprocess.Popen(["run_validation_server"])

        try:
            # Give the server a moment to come up before querying it.
            # NOTE(review): original startup-wait logic lost — confirm.
            time.sleep(2)

            success = success and check_for_content(
                revs_to_gen + ["reference"], 7, 7
            )
            success = success and check_for_plotting(
                revs_to_gen[:-1], str(tmpdir)
            )
        except BaseException:
            # Report the failure but fall through so the server is stopped.
            e = sys.exc_info()[0]
            print(f"Error {e}")
            print(traceback.format_exc())
            success = False
        finally:
            # Always shut the server process down, even on failure.
            server_process.terminate()
            server_process.wait()

    if not success:
        sys.exit(1)
if __name__ == "__main__":
    main()
# Related server-side API used by this test (for reference):
#   get_html_plots_tag_comparison_json(output_base_dir, tags)
#   get_html_plots_tag_comparison_folder(output_base_dir, tags)
#   run_server(ip="127.0.0.1", port=8000, parse_command_line=False,
#              open_site=False, dry_run=False)