"""
Script to prepare a local database for HLT validation.

This is more complicated than we would like because the performance of HLT depends
on the payloads matching the given run, but we also want to validate new payloads
and/or new software versions. So as a "best guess" of what could work we take

1. any payloads in online that are unlimited.
2. any payloads in online valid for the validation run but no longer unlimited,
   *if* a payload with the same name still exists as unlimited in the online tag.
3. any payloads in staging_online.

We take all of these, put them in a local database file and download all the
necessary payload files.
"""
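
# Example invocation (a sketch; the script file name and the exp/run values are
# placeholders, but the environment variables are the ones read in __main__ below):
#
#   VALIDATION_EXP=26 VALIDATION_RUN=1234 python3 prepare_validation_db.py
#
# or, to simply dump an already prepared globaltag instead of assembling one:
#
#   VALIDATION_GLOBALTAG=my_validation_gt python3 prepare_validation_db.py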

import os
import copy
import functools
import multiprocessing
from pathlib import Path

from conditions_db import ConditionsDB
# Note: the exact module paths of the following helpers are assumed here and may
# need adjusting to the local layout of the conditions_db package.
from conditions_db.iov import IntervalOfValidity
from conditions_db.cli_download import download_payload
from conditions_db.local_metadata import LocalMetadataProvider


def update_payload(existing, payload, priority, source):
    """Add the payload to the dictionary of selected payloads, replacing an
    existing entry if the new payload has a higher priority or, at equal
    priority, a higher revision."""
    update = payload.name not in existing
    if not update:
        other = existing[payload.name]
        update = other.priority < priority or \
            (other.priority == priority and other.revision < payload.revision)
    if update:
        # take a copy with an unlimited iov and remember where it came from
        p = copy.copy(payload)
        p.iov = (0, 0, -1, -1)
        p.priority = priority
        p.source = source
        existing[p.name] = p
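
# How the priorities interact (an illustrative example, not taken from real tags):
# if "BeamSpot" exists unlimited in online (priority 1), also has a run-limited
# iov covering the validation run (priority 2), and appears in staging_online
# (priority 3), the staging_online copy wins; without a staging copy the
# run-limited one is taken, and otherwise the unlimited online one.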


def prepare_globaltag(db, exp, run):
    """Collect the payloads to be used for validation of the given exp/run."""
    payloads = {}
    # 1. everything in online that is unlimited
    base = db.get_all_iovs("online")
    existing_names = set()
    for p in base:
        iov = IntervalOfValidity(p.iov)
        if iov.is_open:
            update_payload(payloads, p, 1, "online")
            existing_names.add(p.name)
    # 2. payloads in online valid for the validation run, but only if a payload
    #    with the same name is still unlimited in online
    for p in base:
        iov = IntervalOfValidity(p.iov)
        if not iov.is_open and iov.contains(exp, run) and p.name in existing_names:
            update_payload(payloads, p, 2, f"e{exp}r{run}")
    # 3. everything in staging_online
    staging = db.get_all_iovs("staging_online")
    for p in staging:
        update_payload(payloads, p, 3, "staging")
    return sorted(payloads.values())
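
# The payload objects returned by ConditionsDB.get_all_iovs() are assumed to carry
# at least ``name``, ``revision`` and ``iov`` attributes; ``priority`` and
# ``source`` are added by update_payload() above and are only used for bookkeeping
# and the summary printout at the end of this script.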


if __name__ == "__main__":
    output_dir = Path("cdb")
    metadata = output_dir / "metadata.sqlite"
    output_dir.mkdir(parents=True, exist_ok=True)
    db = ConditionsDB()
    # if an explicit globaltag is given via the environment just take it as is,
    # otherwise assemble the "best guess" list of payloads for the given exp/run
    existing_gt = os.environ.get("VALIDATION_GLOBALTAG", "").strip()
    if existing_gt:
        payloads = db.get_all_iovs(existing_gt)
        for p in payloads:
            p.source = existing_gt
    else:
        payloads = prepare_globaltag(db, int(os.environ['VALIDATION_EXP']),
                                     int(os.environ['VALIDATION_RUN']))
    # write a local metadata file containing a single globaltag called "online"
    localdb = LocalMetadataProvider(str(metadata), mode="overwrite")
    localdb.add_globaltag(1, "online", "RUNNING", payloads)
    # download all necessary payload files in parallel
    downloader = functools.partial(download_payload, db, directory=output_dir)
    with multiprocessing.Pool() as pool:
        pool.map(downloader, payloads)
    # print a summary of all payloads that were taken and where they came from
    maxlen = max(len(p.name) for p in payloads)
    print("Payloads taken:")
    for p in payloads:
        print(f"{p.name:<{maxlen}s} {p.revision:7d} from {p.source}")