"""
Script to prepare a local database for HLT validation.

This is more complicated than we would like because the HLT performance depends
on the payloads matching the given run, but we also want to validate new
payloads and/or new software versions. So, as a "best guess" of what could work,
we take

1. any payloads in online that are unlimited,
2. any payloads in online valid for the validation run but no longer unlimited,
   *if* a payload with the same name still exists as unlimited in the online tag,
3. any payloads in staging_online.

We take all of these, put them in a local database file and download all the
necessary payload files.
"""
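# How the script is typically driven (a sketch only: the numbers and the
# globaltag name are made-up examples, the environment variable names are the
# ones read in the __main__ block below):
#
#   VALIDATION_EXP=26 VALIDATION_RUN=1234 python3 <this script>
#
# or, to validate one specific globaltag verbatim instead of the best guess:
#
#   VALIDATION_GLOBALTAG=my_test_globaltag python3 <this script>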
import copy
import functools
import multiprocessing
import os
from pathlib import Path

from conditions_db import ConditionsDB
# the following imports are assumed from the usual layout of the conditions_db
# package; adjust if these helpers live elsewhere in your release
from conditions_db.iov import IntervalOfValidity
from conditions_db.cli_download import download_payload
from conditions_db.local_metadata import LocalMetadataProvider
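# A payload is "unlimited" when its interval of validity never closes, i.e. the
# final exp/run are -1. A minimal illustration using the same IntervalOfValidity
# calls as the code below (the concrete numbers are arbitrary examples):
#
#   iov = IntervalOfValidity((0, 0, -1, -1))
#   iov.is_open             # True: valid forever
#   iov.contains(26, 1234)  # True: an open IoV covers any run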
def update_payload(existing, payload, priority, source):
    """Keep the given payload in the selection if it has a higher priority than
    what we already have, or the same priority but a newer revision."""
    update = payload.name not in existing
    if not update:
        other = existing[payload.name]
        update = other.priority < priority or \
            (other.priority == priority and other.revision < payload.revision)
    if update:
        p = copy.copy(payload)
        # force the payload to be unlimited in the local database
        p.iov = (0, 0, -1, -1)
        p.priority = priority
        p.source = source
        existing[p.name] = p
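# In effect the priorities used below mean: plain "online" payloads (1) can be
# overridden by run-specific online payloads (2), and both can be overridden by
# anything in staging_online (3); at equal priority the newest revision wins.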
def prepare_globaltag(db, exp, run):
    """Assemble the "best guess" set of payloads for the given validation run
    as described in the module docstring."""
    payloads = {}

    # 1. everything in online that is unlimited
    base = db.get_all_iovs("online")
    existing_names = set()
    for p in base:
        iov = IntervalOfValidity(p.iov)
        if iov.is_open:
            update_payload(payloads, p, 1, "online")
            existing_names.add(p.name)

    # 2. payloads valid for the validation run whose name still exists as
    #    unlimited in online
    for p in base:
        iov = IntervalOfValidity(p.iov)
        if not iov.is_open and iov.contains(exp, run) and p.name in existing_names:
            update_payload(payloads, p, 2, f"e{exp}r{run}")

    # 3. everything in staging_online
    staging = db.get_all_iovs("staging_online")
    for p in staging:
        update_payload(payloads, p, 3, "staging")

    return sorted(payloads.values())
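# The returned entries carry .name and .revision plus the bookkeeping attributes
# set in update_payload (.priority, .source and the IoV forced open), which is
# all the __main__ block below relies on.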
if __name__ == "__main__":
    output_dir = Path("cdb")
    metadata = output_dir / "metadata.sqlite"
    # make sure the output directory exists
    output_dir.mkdir(parents=True, exist_ok=True)

    db = ConditionsDB()

    # if a globaltag is given explicitly just take it verbatim, otherwise
    # assemble the "best guess" described in the module docstring
    existing_gt = os.environ.get("VALIDATION_GLOBALTAG", "").strip()
    if existing_gt:
        payloads = db.get_all_iovs(existing_gt)
        for p in payloads:
            p.source = existing_gt
    else:
        payloads = prepare_globaltag(db, int(os.environ['VALIDATION_EXP']),
                                     int(os.environ['VALIDATION_RUN']))

    # write everything as globaltag "online" into the local metadata database
    localdb = LocalMetadataProvider(str(metadata), mode="overwrite")
    localdb.add_globaltag(1, "online", "RUNNING", payloads)

    # and download all necessary payload files in parallel
    downloader = functools.partial(download_payload, db, directory=output_dir)
    with multiprocessing.Pool() as pool:
        pool.map(downloader, payloads)

    maxlen = max(len(p.name) for p in payloads)
    print("Payloads taken:")
    for p in payloads:
        print(f"{p.name:<{maxlen}s} {p.revision:7d} from {p.source}")