import os
import subprocess
import sys
import tempfile
import time
import traceback

import requests

import validationpath
import validationserver
from validationtestutil import check_execute
# Base URL under which the locally started validation server is reached
validation_url = "http://localhost:8000/"
def start_webserver():
    """
    Start the validation server process, this will not
    return. Therefore this function must be started within
    a separate process.
    """
    # NOTE(review): body reconstructed from the validationserver API —
    # run_server() serves HTTP requests and blocks until terminated; confirm
    # against the original file.
    validationserver.run_server()
def http_post(command, json_args):
    """
    POST ``json_args`` as a JSON body to the validation-server endpoint
    ``command``.

    :param command: REST endpoint name, appended to ``validation_url``
    :param json_args: dict payload sent as the JSON request body
    :return: the ``requests.Response`` on success, ``None`` on failure
    """
    call_url = validation_url + command
    print(f"Posting {json_args} to {command}")
    r = requests.post(call_url, json=json_args)
    if not r.ok:
        # surface the failing endpoint and payload for debugging
        print(
            "REST call {} with arguments {} failed".format(call_url, json_args)
        )
        return None
    return r
def check_for_plotting(revs, tmp_folder):
    """
    Checks if creating new plots for a revision combination works

    :param revs: List of revisions
    :param tmp_folder: Temporary folder
    :return: True if the comparison plots were created, False otherwise
    """
    print(f"Trying to recreate plots for revisions {revs}")

    res = http_post("create_comparison", {"revision_list": revs})
    if not res:
        return False

    # key handed back by the server to poll the plot-creation progress
    prog_key = res.json()["progress_key"]

    wait_time = 0.1  # seconds between status polls
    max_wait_time = 3
    summed_wait_time = 0

    # poll the server until the comparison is reported complete, or give up
    # after max_wait_time seconds
    while True:
        res = http_post("check_comparison_status", {"input": prog_key})
        if not res:
            return False
        if res.json()["status"] == "complete":
            break
        time.sleep(wait_time)
        summed_wait_time += wait_time
        if summed_wait_time > max_wait_time:
            print(
                "Waited for {} seconds for the requested plots to complete "
                "and nothing happened".format(summed_wait_time)
            )
            return False

    # NOTE(review): output locations reconstructed via the validationpath
    # helpers visible in this project's API — confirm argument order
    comp_folder = validationpath.get_html_plots_tag_comparison_folder(
        tmp_folder, revs
    )
    comp_json = validationpath.get_html_plots_tag_comparison_json(
        tmp_folder, revs
    )

    if not os.path.exists(comp_folder):
        print(f"Comparison folder {comp_folder} does not exist")
        return False
    if not os.path.isfile(comp_json):
        print(f"Comparison json {comp_json} does not exist")
        return False

    # spot-check one expected plot file
    # NOTE(review): the intermediate path component is not visible in the
    # mangled source — confirm against the validation output layout
    some_plot = os.path.join(
        comp_folder,
        "validation-test",
        "validationTestPlotsB_gaus_histogram.pdf",
    )
    if not os.path.isfile(some_plot):
        print(f"Comparison plot {some_plot} does not exist")
        return False

    print("Comparison properly created")
    return True
def check_for_content(revs, min_matrix_plots, min_plot_objects):
    """
    Checks for the expected content on the validation website

    :param revs: revisions expected to be listed on the website
    :param min_matrix_plots: minimal number of plots expected in the overview
        (NOTE(review): not referenced in the visible fragment — confirm use)
    :param min_plot_objects: minimal number of plot objects expected
    :return: True if all checks pass, False otherwise
    """
    try:
        import splinter  # noqa
    except ImportError:
        print(
            "The splinter package is required to run this test. Run 'pip3 "
            "install splinter' to install"
        )
        # optional dependency missing: report failure instead of crashing
        return False

    with splinter.Browser() as browser:
        url = validation_url + "static/validation.html"
        # NOTE(review): this print passes the format string and url as two
        # arguments without .format() — kept byte-identical to the original
        print("Opening {} to perform checks", url)
        browser.visit(url)

        if len(browser.title) == 0:
            print("Validation website cannot be loaded")
            return False

        # every requested revision must appear as a revision label
        found_revs = browser.find_by_css(".revision-label")
        for r in revs:
            rr = [web_r for web_r in found_revs if web_r.value == r]
            if not rr:
                print(
                    "Revsion {} was not found on validation website. It "
                    "should be there.".format(r)
                )
                return False

        # check for a minimal number of plot objects
        plot_objects = browser.find_by_css(".object")
        print(
            "Checking for a minimum number of {} plot objects", min_plot_objects
        )
        if len(plot_objects) < min_plot_objects:
            print(
                "Only {} plots found, while {} are expected".format(
                    len(plot_objects), min_plot_objects
                )
            )
            return False

        # enable the overview so the plot matrix becomes visible
        checkbox_overview = browser.find_by_id("check_show_overview")
        checkbox_overview.check()

    return True
def main():
    """
    Runs two test validations, starts the web server and queries data
    """
    # NOTE(review): the original guard expression is not visible in the
    # mangled source — the visible message says the test is unconditionally
    # skipped on the build bot; confirm the real condition
    if True:
        print(
            "TEST SKIPPED: Not properly runnable on build bot", file=sys.stderr
        )
        sys.exit(0)

    # the content checks below need the splinter browser package
    try:
        import splinter  # noqa
    except ImportError:
        print(
            "TEST SKIPPED: The splinter package is required to run this test."
            + "Run 'pip3 install splinter' to install",
            file=sys.stderr,
        )
        sys.exit(0)

    success = True

    revs_to_gen = ["stack_test_1", "stack_test_2", "stack_test_3"]

    with tempfile.TemporaryDirectory() as tmpdir:
        # run everything inside the throw-away folder
        os.chdir(str(tmpdir))

        # generate one test validation result per revision tag
        for r in revs_to_gen:
            check_execute(f"validate_basf2 --test --tag {r}")

        try:
            # start the validation webserver serving the generated results
            server_process = subprocess.Popen(["run_validation_server"])

            # give the server a moment to come up
            # NOTE(review): exact wait not visible in the source — confirm
            time.sleep(2)

            success = success and check_for_content(
                revs_to_gen + ["reference"], 7, 7
            )

            # check if creating comparison plots works
            success = success and check_for_plotting(
                revs_to_gen[:-1], str(tmpdir)
            )
        except BaseException:
            # catch everything so the finally block can still shut the
            # webserver down; report the exception for the log
            e = sys.exc_info()[0]
            print("Error: {}".format(e))
            print(traceback.format_exc())
            success = False
        finally:
            # shut the webserver down in any case
            server_process.terminate()
            server_process.wait()

    if not success:
        sys.exit(1)
if __name__ == "__main__":
    # NOTE(review): guard body not visible in the mangled source — assumed to
    # dispatch to the module's test driver; confirm against the original file
    main()
# NOTE(review): the following lines appear to be extraction residue from
# unrelated documentation (a C++ signature plus API signatures of the
# validationpath/validationserver helpers used above); kept as comments
# pending confirmation that they can be removed.
# int main(int argc, char **argv)
# Run all tests.
# def get_html_plots_tag_comparison_json(output_base_dir, tags)
# def get_html_plots_tag_comparison_folder(output_base_dir, tags)
# def run_server(ip="127.0.0.1", port=8000, parse_command_line=False, open_site=False, dry_run=False)