Belle II Software  release-08-01-10
setup-01-preparelocaldb.py
#!/usr/bin/env python3

"""
Script to prepare a local database for HLT validation.

This is more complicated than we would like because the performance of the HLT
depends on the payloads matching the given run, but we also want to validate
new payloads and/or new software versions. So, as a "best guess" of what could
work, we take payloads with the following priority:

1. any payloads in online that are unlimited.
2. any payloads in online valid for the validation run but no longer unlimited
   *if* a payload with the same name still exists as unlimited in the online tag.
3. any payloads in staging_online

We take all of these, put them in a local database file and download all the
necessary payload files.
"""
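
# The __main__ block below reads the validation run from the VALIDATION_EXP and
# VALIDATION_RUN environment variables; setting VALIDATION_GLOBALTAG makes the
# script download that prepared globaltag instead of building its own. A
# hypothetical invocation (example values only):
#
#   VALIDATION_EXP=26 VALIDATION_RUN=1430 ./setup-01-preparelocaldb.py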

import os
import copy
from pathlib import Path
import multiprocessing
import functools
from conditions_db import ConditionsDB
from conditions_db.iov import IntervalOfValidity
from conditions_db.local_metadata import LocalMetadataProvider
from conditions_db.cli_download import download_payload


def update_payload(existing, payload, priority, source):
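    """
    Remember ``payload`` in the ``existing`` dict (keyed by payload name) if it
    is not there yet, comes with a higher priority, or has the same priority
    but a higher revision. The stored copy gets an iov covering all runs and
    keeps the priority and source for the final debug printout.
    """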
    # normally we'd just take one but right now there are overlaps in
    # online so better safe than sorry: take the highest revision
    update = payload.name not in existing
    if not update:
        other = existing[payload.name]
        update = other.priority < priority or \
            (other.priority == priority and other.revision < payload.revision)

    if update:
        # keep for all runs
        p = copy.copy(payload)
        p.iov = (0, 0, -1, -1)
        # and remember priority and source for debugging
        p.priority = priority
        p.source = source
        existing[p.name] = p


def prepare_globaltag(db, exp, run):
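    """
    Collect the payloads to validate for the given experiment and run:
    unlimited iovs from the online globaltag (priority 1), run-dependent iovs
    from online that still have an unlimited payload of the same name
    (priority 2) and everything from staging_online (priority 3). Returns the
    selected payloads as a sorted list.
    """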
    payloads = {}

    # get the unlimited iovs from online ... and the ones for our particular run
    base = db.get_all_iovs("online")
    existing_names = set()
    for p in base:
        iov = IntervalOfValidity(p.iov)
        if iov.is_open:
            update_payload(payloads, p, 1, "online")
            existing_names.add(p.name)

    for p in base:
        iov = IntervalOfValidity(p.iov)
        if not iov.is_open and iov.contains(exp, run) and p.name in existing_names:
            update_payload(payloads, p, 2, f"e{exp}r{run}")

    # and take everything from staging
    staging = db.get_all_iovs("staging_online")
    for p in staging:
        update_payload(payloads, p, 3, "staging")

    return sorted(payloads.values())


if __name__ == "__main__":
    output_dir = Path("cdb")
    metadata = output_dir / "metadata.sqlite"
    if metadata.exists():
        metadata.unlink()
    else:
        output_dir.mkdir(parents=True, exist_ok=True)

    db = ConditionsDB()

    # Check if we have a prepared globaltag to validate. Also allow overriding
    # this via a bamboo build variable if necessary. If we have a prepared
    # globaltag we don't build our own version but just download it.
    existing_gt = os.environ.get("VALIDATION_GLOBALTAG", "").strip()
    if existing_gt:
        payloads = db.get_all_iovs(existing_gt)
        for p in payloads:
            p.source = existing_gt
    else:
        payloads = prepare_globaltag(db, int(os.environ['VALIDATION_EXP']), int(os.environ['VALIDATION_RUN']))

    # Save under the name "online" in the local database file
    localdb = LocalMetadataProvider(str(metadata), mode="overwrite")
    localdb.add_globaltag(1, "online", "RUNNING", payloads)

    # Download all the payloads in parallel
    downloader = functools.partial(download_payload, db, directory=output_dir)
    with multiprocessing.Pool() as p:
        p.map(downloader, payloads)

    # and print the final result
    maxlen = max(len(p.name) for p in payloads)
    print("Payloads taken: ")
    for p in payloads:
        print(f"{p.name:<{maxlen}s} {p.revision:7d} from {p.source}")