"""Python interface to the ConditionsDB"""

import os
import sys
import json
import urllib.parse
import hashlib
import itertools
import requests
from basf2 import B2FATAL, B2ERROR, B2INFO, B2WARNING
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from requests.packages.urllib3.fields import RequestField
from requests.packages.urllib3.filepost import encode_multipart_formdata
from versioning import upload_global_tag, jira_global_tag_v2
from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor, wait as futures_wait
from typing import Union

def encode_name(name):
    """Escape a name so it can be safely used in a URL"""
    return urllib.parse.quote(name, safe="")

def file_checksum(filename):
    """Calculate md5 hash of file"""
    md5hash = hashlib.md5()
    with open(filename, "rb") as data:
        md5hash.update(data.read())
    return md5hash.hexdigest()

def chunks(container, chunk_size):
    """Cut a container in chunks of max. chunk_size"""
    it = iter(container)
    while True:
        chunk = tuple(itertools.islice(it, chunk_size))
        if not chunk:
            return
        yield chunk

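
# Illustrative usage sketch of the helpers above (not part of the original
# module; the file name used here is hypothetical):
def _example_helper_usage():
    """Show how encode_name, file_checksum and chunks can be used."""
    safe = encode_name("my payload/name")             # -> "my%20payload%2Fname"
    digest = file_checksum("example_payload.root")    # md5 of a (hypothetical) local file
    for batch in chunks(range(10), 3):                # iterate in batches of at most 3 elements
        print(batch)
    return safe, digest
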
58 """Small container class to help compare payload information for efficient
59 comparison between globaltags"""
63 """Set all internal members from the json information of the payload and the iov.
66 payload (dict): json information of the payload as returned by REST api
67 iov (dict): json information of the iov as returned by REST api
70 iov = {
"payloadIovId":
None,
"expStart":
None,
"runStart":
None,
"expEnd":
None,
"runEnd":
None}
74 payload[
'basf2Module'][
'name'],
77 payload[
'payloadUrl'],
80 (iov[
"expStart"], iov[
"runStart"], iov[
"expEnd"], iov[
"runEnd"]),
    def __init__(self, payload_id, name, revision, checksum, payload_url, base_url, iov_id=None, iov=None):
        """
        Create a new object from the given information
        """
        self.name = name
        self.checksum = checksum
        self.iov = iov
        self.iov_id = iov_id
        self.revision = revision
        self.payload_id = payload_id
        self.payload_url = payload_url
        self.base_url = base_url

    @property
    def url(self):
        """Return the full url to the payload on the server"""
        return urllib.parse.urljoin(self.base_url + '/', self.payload_url)

    def __hash__(self):
        """Make object hashable"""
        return hash((self.name, self.checksum, self.iov))

    def __eq__(self, other):
        """Check if two payloads are equal"""
        return (self.name, self.checksum, self.iov) == (other.name, other.checksum, other.iov)

    def __lt__(self, other):
        """Sort payloads by name, iov, revision"""
        return (self.name.lower(), self.iov, self.revision) < (other.name.lower(), other.iov, other.revision)

122 """return a human readable name for the IoV"""
123 if self.
ioviov
is None:
126 if self.
ioviov == (0, 0, -1, -1):
129 e1, r1, e2, r2 = self.
ioviov
131 if r1 == 0
and r2 == -1:
134 return f
"exp {e1}, runs {r1}+"
136 return f
"exp {e1}, run {r1}"
138 return f
"exp {e1}, runs {r1} - {r2}"
140 if e2 == -1
and r1 == 0:
141 return f
"exp {e1} - forever"
143 return f
"exp {e1}, run {r1} - forever"
144 elif r1 == 0
and r2 == -1:
145 return f
"exp {e1}-{e2}, all runs"
147 return f
"exp {e1}, run {r1} - exp {e2}, all runs"
149 return f
"exp {e1}, run {r1} - exp {e2}, run {r2}"
153 """Class to interface conditions db REST interface"""
156 BASE_URLS = [
"http://belle2db.sdcc.bnl.gov/b2s/rest/"]
159 """Class to be thrown by request() if there is any error"""
164 """Resolve the list of server urls. If a url is given just return it.
165 Otherwise return servers listed in BELLE2_CONDB_SERVERLIST or the
169 given_url (str): Explicit base_url. If this is not None it will be
170 returned as is in a list of length 1
173 a list of urls to try for database connectivity
176 base_url_list = ConditionsDB.BASE_URLS[:]
177 base_url_env = os.environ.get(
"BELLE2_CONDB_SERVERLIST",
None)
178 if given_url
is not None:
179 base_url_list = [given_url]
180 elif base_url_env
is not None:
181 base_url_list = base_url_env.split()
182 B2INFO(
"Getting Conditions Database servers from Environment:")
183 for i, url
in enumerate(base_url_list, 1):
184 B2INFO(f
" {i}. {url}")
187 for url
in base_url_list:
188 if url.startswith(
"http://"):
189 full_list.append(
"https" + url[4:])
191 full_list.append(url)
    def __init__(self, base_url=None, max_connections=10, retries=3):
        """
        Create a new instance of the interface

        Args:
            base_url (string): base url of the rest interface
            max_connections (int): number of connections to keep open, mostly useful for threaded applications
            retries (int): number of retries in case of connection problems
        """
        #: session object to get keep-alive support and connection pooling
        self._session = requests.Session()
        # set the connection options we want to have
        adapter = requests.adapters.HTTPAdapter(
            pool_connections=max_connections, pool_maxsize=max_connections,
            max_retries=retries, pool_block=True)
        self._session.mount("http://", adapter)
        self._session.mount("https://", adapter)
        # use a proxy if one is configured in the environment
        if "BELLE2_CONDB_PROXY" in os.environ:
            self._session.proxies = {
                "http": os.environ.get("BELLE2_CONDB_PROXY"),
                "https": os.environ.get("BELLE2_CONDB_PROXY"),
            }
        # find a working server from the list of urls
        base_url_list = ConditionsDB.get_base_urls(base_url)

        for url in base_url_list:
            #: base url to be prepended to all requests
            self._base_url = url.rstrip("/") + "/"
            try:
                req = self._session.request("HEAD", self._base_url + "v2/globalTags")
                req.raise_for_status()
            except requests.RequestException as e:
                B2WARNING(f"Problem connecting to {url}:\n {e}\n Trying next server ...")
            else:
                break
        else:
            B2FATAL("No working database servers configured, giving up")

        # ask the server for json replies and disable caching
        self._session.headers.update({"Accept": "application/json", "Cache-Control": "no-cache"})

    def set_authentication(self, user, password, basic=True):
        """
        Set authentication credentials when talking to the database

        Args:
            user (str): username
            password (str): password
            basic (bool): if True use HTTP Basic authentication, otherwise HTTP Digest
        """
        authtype = HTTPBasicAuth if basic else HTTPDigestAuth
        self._session.auth = authtype(user, password)

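    # Usage sketch (illustrative, credentials are placeholders):
    #
    #   db = ConditionsDB()
    #   db.set_authentication("username", "password")         # HTTP Basic
    #   db.set_authentication("username", "password", False)  # HTTP Digest
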
    def request(self, method, url, message=None, *args, **argk):
        """
        Request function, similar to requests.request but adding the base_url

        Args:
            method (str): GET, POST, etc.
            url (str): url for the request, base_url will be prepended
            message (str): message to show when starting the request and if it fails

        All other arguments will be forwarded to requests.request.
        """
        if message is not None:
            B2INFO(message)

        try:
            req = self._session.request(method, self._base_url + url.lstrip("/"), *args, **argk)
        except requests.exceptions.ConnectionError as e:
            B2FATAL("Could not access '" + self._base_url + url.lstrip("/") + "': " + str(e))

        if req.status_code >= 300:
            # something went wrong, try to decode the error message sent by the server
            try:
                response = req.json()
                message = response.get("message", "")
                colon = ": " if message.strip() else ""
                error = "Request {method} {url} returned {code} {reason}{colon}{message}".format(
                    method=method, url=url,
                    code=response["code"],
                    reason=response["reason"],
                    colon=colon,
                    message=message)
            except json.JSONDecodeError:
                error = "Request {method} {url} returned non JSON response {code}: {content}".format(
                    method=method, url=url,
                    code=req.status_code,
                    content=req.content)

            if message is not None:
                error = f"{message} failed: {error}"

            raise ConditionsDB.RequestError(error)

        if method != "HEAD" and req.status_code != requests.codes.no_content:
            try:
                req.json()
            except json.JSONDecodeError as e:
                B2INFO(f"Invalid response: {req.content}")
                raise ConditionsDB.RequestError("{method} {url} returned invalid JSON response: {}"
                                                .format(e, method=method, url=url))
        return req

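    # Usage sketch (illustrative): request() is the low level building block
    # used by the convenience methods below, e.g.
    #
    #   req = db.request("GET", "/globalTags", "Getting all globaltags")
    #   tags = req.json()
    #
    # any HTTP error is converted into a ConditionsDB.RequestError.
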
309 """Get a list of all globaltags. Returns a dictionary with the globaltag
310 names and the corresponding ids in the database"""
313 req = self.
requestrequest(
"GET",
"/globalTags")
315 B2ERROR(f
"Could not get the list of globaltags: {e}")
319 for tag
in req.json():
320 result[tag[
"name"]] = tag
325 """Check whether the globaltag with the given name exists."""
328 self.
requestrequest(
"GET",
"/globalTag/{globalTagName}".format(globalTagName=encode_name(name)))
335 """Get the id of the globaltag with the given name. Returns either the
336 id or None if the tag was not found"""
339 req = self.
requestrequest(
"GET",
"/globalTag/{globalTagName}".format(globalTagName=encode_name(name)))
341 B2ERROR(f
"Cannot find globaltag '{name}': {e}")
    def get_globalTagType(self, name):
        """
        Get the dictionary describing the given globaltag type (currently
        one of DEV or RELEASE). Returns None if the tag type was not found.
        """
        try:
            req = self.request("GET", "/globalTagType")
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Could not get list of valid globaltag types: {e}")
            return None

        types = {e["name"]: e for e in req.json()}

        if name in types:
            return types[name]

        B2ERROR("Unknown globaltag type: '{}', please use one of {}".format(name, ", ".join(types)))
        return None

    def create_globalTag(self, name, description, user):
        """
        Create a new globaltag
        """
        info = {"name": name, "description": description, "modifiedBy": user, "isDefault": False}
        try:
            req = self.request("POST", "/globalTag/DEV", f"Creating globaltag {name}", json=info)
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Could not create globaltag {name}: {e}")
            return None

        return req.json()

    def get_all_iovs(self, globalTag, exp=None, run=None, message=None):
        """
        Return a list of all payloads in the given globaltag where each element is
        a `PayloadInformation` instance

        Parameters:
            globalTag (str): name of the globaltag
            exp (int): if given limit the list of payloads to the ones valid for
                the given exp,run combination
            run (int): if given limit the list of payloads to the ones valid for
                the given exp,run combination
            message (str): additional message to show when downloading the
                payload information. Will be directly appended to
                "Obtaining list of iovs for globaltag {globalTag}"

        Both exp and run need to be given at the same time. Just supplying
        an experiment or a run number will not work
        """
        globalTag = encode_name(globalTag)
        if message is None:
            message = ""
        all_iovs = []
        if exp is not None:
            msg = f"Obtaining list of iovs for globaltag {globalTag}, exp={exp}, run={run}{message}"
            req = self.request("GET", "/iovPayloads", msg,
                               params={'gtName': globalTag, 'expNumber': exp, 'runNumber': run})
        else:
            msg = f"Obtaining list of iovs for globaltag {globalTag}{message}"
            req = self.request("GET", f"/globalTag/{globalTag}/globalTagPayloads", msg)

        for item in req.json():
            payload = item["payload" if 'payload' in item else "payloadId"]
            if "payloadIov" in item:
                iovs = [item['payloadIov']]
            else:
                iovs = item['payloadIovs']

            for iov in iovs:
                all_iovs.append(PayloadInformation.from_json(payload, iov))

        all_iovs.sort()
        return all_iovs

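    # Usage sketch (illustrative, the globaltag name and numbers are placeholders):
    #
    #   for info in db.get_all_iovs("some_globaltag", exp=12, run=1000):
    #       print(info.name, info.revision, info.iov_str())
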
    def get_payloads(self, global_tag=None):
        """
        Get a list of all defined payloads (for the given global_tag or by default for all).
        Returns a dictionary which maps (module, checksum) to the payload id.
        """
        try:
            if global_tag:
                req = self.request("GET", "/globalTag/{global_tag}/payloads"
                                   .format(global_tag=encode_name(global_tag)))
            else:
                req = self.request("GET", "/payloads")
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Cannot get list of payloads: {e}")
            return {}

        result = {}
        for payload in req.json():
            module = payload["basf2Module"]["name"]
            checksum = payload["checksum"]
            result[(module, checksum)] = payload["payloadId"]

        return result

    def check_payloads(self, payloads, information="payloadId"):
        """
        Check for the existence of payloads in the database.

        Arguments:
            payloads (list((str,str))): A list of payloads to check for. Each
                payload needs to be a tuple of the name of the payload and the
                md5 checksum of the payload file.
            information (str): The information to be extracted from the
                payload dictionary

        Returns:
            A dictionary with the payload identifiers (name, checksum) as keys
            and the requested information as values for all payloads which are already
            present in the database.
        """
        search_query = [{"name": e[0], "checksum": e[1]} for e in payloads]
        try:
            req = self.request("POST", "/checkPayloads", json=search_query)
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Cannot check for existing payloads: {e}")
            return {}

        result = {}
        for payload in req.json():
            module = payload["basf2Module"]["name"]
            checksum = payload["checksum"]
            result[(module, checksum)] = payload[information]

        return result

    def get_revisions(self, entries):
        """
        Get the revision numbers of payloads in the database.

        Arguments:
            entries (list): A list of payload entries.
                Each entry must have the attributes module and checksum.
        """

        result = self.check_payloads([(entry.module, entry.checksum) for entry in entries], "revision")
        if not result:
            return False

        for entry in entries:
            entry.revision = result.get((entry.module, entry.checksum), 0)

        return True

    def create_payload(self, module, filename, checksum=None):
        """
        Create a new payload

        Args:
            module (str): name of the module
            filename (str): name of the file
            checksum (str): md5 hexdigest of the file. Will be calculated automatically if not given
        """
        if checksum is None:
            checksum = file_checksum(filename)

        # this request has to be sent as multipart/mixed which is not directly
        # provided by the requests library, so build the body by hand from the
        # file content and a small json part
        files = [
            (filename, open(filename, "rb").read(), "application/x-root"),
            ("json", json.dumps({"checksum": checksum, "isDefault": False}), "application/json"),
        ]
        fields = []
        for name, contents, mimetype in files:
            rf = RequestField(name=name, data=contents)
            rf.make_multipart(content_type=mimetype)
            fields.append(rf)

        # now encode the fields as multipart/mixed and send the request
        post_body, content_type = encode_multipart_formdata(fields)
        content_type = ''.join(('multipart/mixed',) + content_type.partition(';')[1:])
        headers = {'Content-Type': content_type}

        try:
            req = self.request("POST", "/package/dbstore/module/{moduleName}/payload"
                               .format(moduleName=encode_name(module)),
                               data=post_body, headers=headers)
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Could not create Payload: {e}")
            return None

        return req.json()["payloadId"]

    def create_iov(self, globalTagId, payloadId, firstExp, firstRun, finalExp, finalRun):
        """
        Create an iov for a payload in a globaltag

        Args:
            globalTagId (int): id of the globaltag, obtain with get_globalTagId()
            payloadId (int): id of the payload, obtain from create_payload() or get_payloads()
            firstExp (int): first experiment for which this iov is valid
            firstRun (int): first run for which this iov is valid
            finalExp (int): final experiment for which this iov is valid
            finalRun (int): final run for which this iov is valid

        Returns:
            payloadIovId of the created iov, None if creation was not successful
        """
        try:
            # try to convert all arguments except self to integers to make sure
            # they are valid
            local_variables = locals()
            variables = {e: int(local_variables[e]) for e in
                         ["globalTagId", "payloadId", "firstExp", "firstRun", "finalExp", "finalRun"]}
        except ValueError:
            B2ERROR("create_iov: All parameters need to be integers")
            return None

        try:
            req = self.request("POST", "/globalTagPayload/{globalTagId},{payloadId}"
                               "/payloadIov/{firstExp},{firstRun},{finalExp},{finalRun}".format(**variables))
        except ConditionsDB.RequestError as e:
            B2ERROR(f"Could not create IOV: {e}")
            return None

        return req.json()["payloadIovId"]

    def get_iovs(self, globalTagName, payloadName=None):
        """Return existing iovs for a given tag name. It returns a dictionary
        which maps (payloadId, firstExp, firstRun, finalExp, finalRun) to iovId

        Args:
            globalTagName(str): Global tag name.
            payloadName(str): Payload name (if None, selection by payload name is
                not performed).
        """

        try:
            req = self.request("GET", "/globalTag/{globalTagName}/globalTagPayloads"
                               .format(globalTagName=encode_name(globalTagName)))
        except ConditionsDB.RequestError:
            # there could simply be no iovs for this tag yet
            return {}

        result = {}
        for payload in req.json():
            payloadId = payload["payloadId"]["payloadId"]
            if payloadName is not None:
                if payload["payloadId"]["basf2Module"]["name"] != payloadName:
                    continue
            for iov in payload["payloadIovs"]:
                iovId = iov["payloadIovId"]
                firstExp, firstRun = iov["expStart"], iov["runStart"]
                finalExp, finalRun = iov["expEnd"], iov["runEnd"]
                result[(payloadId, firstExp, firstRun, finalExp, finalRun)] = iovId

        return result

    def upload(self, filename, global_tag, normalize=False, ignore_existing=False, nprocess=1, uploaded_entries=None):
        """
        Upload a testing payload storage to the conditions database.

        Parameters:
            filename (str): filename of the testing payload storage file that should be uploaded
            global_tag (str): name of the globaltag to which the data should be uploaded
            normalize (Union[bool, str]): if True the payload root files will be normalized to have the same checksum for
                the same content, if normalize is a string in addition the file name in the root file metadata will be set to it
            ignore_existing (bool): if True do not upload payloads that already exist
            nprocess (int): maximal number of parallel uploads
            uploaded_entries (list): the list of successfully uploaded entries

        Returns:
            True if the upload was successful
        """
        # first create a list of payloads from the testing payload storage file
        B2INFO(f"Reading payload list from {filename}")
        entries = parse_testing_payloads_file(filename)
        if entries is None:
            B2ERROR(f"Problems with testing payload storage file {filename}, exiting")
            return False

        if not entries:
            B2INFO(f"No payloads found in {filename}, exiting")
            return True

        B2INFO(f"Found {len(entries)} iovs to upload")

        # get the id of the globaltag
        tagId = self.get_globalTagInfo(global_tag)
        if tagId is None:
            return False
        tagId = tagId["globalTagId"]

        # drop duplicate iovs, keeping the last one defined for each
        entries = sorted(set(reversed(entries)))

        # if requested, normalize the payload files so identical content gives identical checksums
        if normalize:
            name = normalize if normalize is not True else None
            for e in entries:
                e.normalize(name=name)

        # group the entries by payload (name, checksum) as one payload can have multiple iovs
        payloads = defaultdict(list)
        for e in entries:
            payloads[(e.module, e.checksum)].append(e)

        existing_payloads = {}
        existing_iovs = {}

        def upload_payload(item):
            """Upload a payload file if necessary but first check list of existing payloads"""
            key, entries = item
            if key in existing_payloads:
                B2INFO(f"{key[0]} (md5:{key[1]}) already existing in database, skipping.")
                payload_id = existing_payloads[key]
            else:
                entry = entries[0]
                payload_id = self.create_payload(entry.module, entry.filename, entry.checksum)
                if payload_id is None:
                    return False

                B2INFO(f"Created new payload {payload_id} for {entry.module} (md5:{entry.checksum})")

            for entry in entries:
                entry.payload = payload_id

            return True

        def create_iov(entry):
            """Create an iov if necessary but first check the list of existing iovs"""
            if entry.payload is None:
                return None

            iov_key = (entry.payload,) + entry.iov_tuple
            if iov_key in existing_iovs:
                entry.iov = existing_iovs[iov_key]
                B2INFO(f"IoV {entry.iov_tuple} for {entry.module} (md5:{entry.checksum}) already existing in database, skipping.")
            else:
                entry.payloadIovId = self.create_iov(tagId, entry.payload, *entry.iov_tuple)
                if entry.payloadIovId is None:
                    return None

                B2INFO(f"Created IoV {entry.iov_tuple} for {entry.module} (md5:{entry.checksum})")

            return entry

        # do the uploads in parallel
        with ThreadPoolExecutor(max_workers=nprocess) as pool:
            # unless existing entries should be ignored, download the list of
            # existing payloads and iovs first
            if not ignore_existing:
                B2INFO("Downloading information about existing payloads and iovs...")
                futures = []
                existing_payloads = {}
                existing_iovs = {}

                def create_future(iter, func, callback=None):
                    fn = pool.submit(iter, func)
                    if callback is not None:
                        fn.add_done_callback(callback)
                    futures.append(fn)

                def update_iovs(iovs):
                    existing_iovs.update(iovs.result())
                    B2INFO(f"Found {len(existing_iovs)} existing iovs in {global_tag}")

                def update_payloads(payloads):
                    existing_payloads.update(payloads.result())
                    B2INFO(f"Found {len(existing_payloads)} existing payloads")

                create_future(self.get_iovs, global_tag, update_iovs)
                # check the payloads in chunks of 1000
                for chunk in chunks(payloads.keys(), 1000):
                    create_future(self.check_payloads, chunk, update_payloads)

                futures_wait(futures)

            # upload the payloads
            failed_payloads = sum(0 if result else 1 for result in pool.map(upload_payload, payloads.items()))
            if failed_payloads > 0:
                B2ERROR(f"{failed_payloads} payloads could not be uploaded")

            # and create the iovs
            failed_iovs = 0
            for entry in pool.map(create_iov, entries):
                if entry is not None:
                    if uploaded_entries is not None:
                        uploaded_entries.append(entry)
                else:
                    failed_iovs += 1

            if failed_iovs > 0:
                B2ERROR(f"{failed_iovs} IoVs could not be created")

        # report the successfully uploaded entries if requested
        if uploaded_entries is not None:
            B2INFO(f"Successfully uploaded {len(uploaded_entries)} IoVs")

        return failed_payloads + failed_iovs == 0

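    # Usage sketch (illustrative, file and tag names are placeholders): the
    # testing payload storage file is typically the "database.txt" written by
    # basf2 into a local database directory.
    #
    #   db = ConditionsDB()
    #   ok = db.upload("localdb/database.txt", "my_staging_tag", normalize=True, nprocess=5)
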
    def staging_request(self, filename, normalize, data, password):
        """
        Upload a testing payload storage to a staging globaltag and create or update a jira issue

        Parameters:
            filename (str): filename of the testing payload storage file that should be uploaded
            normalize (Union[bool, str]): if True the payload root files will be
                normalized to have the same checksum for the same content, if
                normalize is a string in addition the file name in the root file
                metadata will be set to it
            data (dict): a dictionary with the information provided by the user:

                * task: category of globaltag, either main, online, prompt, data, mc, or analysis
                * tag: the globaltag name
                * request: type of request, either Update, New, or Modification. The latter two imply task == main because
                  if new payload classes are introduced or payload classes are modified then they will first be included in
                  the main globaltag. Here a synchronization of code and payload changes has to be managed.
                  If new or modified payload classes should be included in other globaltags they must already be in a release.
                * pull-request: number of the pull request containing new or modified payload classes,
                  only for request == New or Modified
                * backward-compatibility: description of what happens if the old payload is encountered by the updated code,
                  only for request == Modified
                * forward-compatibility: description of what happens if a new payload is encountered by the existing code,
                  only for request == Modified
                * release: the required release version
                * reason: the reason for the request
                * description: a detailed description for the globaltag manager
                * issue: identifier of an existing jira issue (optional)
                * user: name of the user
                * time: time stamp of the request

            password: the password for access to jira or the access token and secret for oauth access

        Returns:
            True if the upload and the jira issue creation/update were successful
        """

        # determine the staging globaltag name and create it if necessary
        data['tag'] = upload_global_tag(data['task'])
        if data['tag'] is None:
            data['tag'] = f"temp_{data['task']}_{data['user']}_{data['time']}"

            if not self.create_globalTag(data['tag'], data['reason'], data['user']):
                return False

        # upload the payloads
        B2INFO(f"Uploading testing database {filename} to globaltag {data['tag']}")
        entries = []
        if not self.upload(filename, data['tag'], normalize, uploaded_entries=entries):
            return False

        # get the configuration for the jira issue: either an existing issue id
        # or the template for a new one
        if data['issue']:
            issue = data['issue']
        else:
            issue = jira_global_tag_v2(data['task'])
        if issue is None:
            issue = {"components": [{"name": "globaltag"}]}

        # create the description of the request
        if type(issue) is tuple:
            description = issue[1].format(**data)
            issue = issue[0]
        else:
            description = f"""
|*Upload globaltag* | {data['tag']} |
|*Request reason* | {data['reason']} |
|*Required release* | {data['release']} |
|*Type of request* | {data['request']} |
"""
        if 'pull-request' in data.keys():
            description += f"|*Pull request* | \\#{data['pull-request']} |\n"
        if 'backward-compatibility' in data.keys():
            description += f"|*Backward compatibility* | \\#{data['backward-compatibility']} |\n"
        if 'forward-compatibility' in data.keys():
            description += f"|*Forward compatibility* | \\#{data['forward-compatibility']} |\n"
        description += '|*Details* |' + ''.join(data['details']) + ' |\n'
        if data['task'] == 'online':
            description += '|*Impact on data taking*|' + ''.join(data['data_taking']) + ' |\n'

        # add the list of uploaded payloads
        description += '\nPayloads\n||Name||Revision||IoV||\n'
        for entry in entries:
            description += f"|{entry.module} | {entry.revision} | ({entry.iov_str()}) |\n"

        # create a new issue
        if type(issue) is dict:
            issue["description"] = description
            if "summary" in issue.keys():
                issue["summary"] = issue["summary"].format(**data)
            else:
                issue["summary"] = f"Globaltag request for {data['task']} by {data['user']} at {data['time']}"
            if "project" not in issue.keys():
                issue["project"] = {"key": "BII"}
            if "issuetype" not in issue.keys():
                issue["issuetype"] = {"name": "Task"}
            if data["task"] == "main":
                issue["labels"] = ["TUPPR"]

            B2INFO(f"Creating jira issue for {data['task']} globaltag request")
            if isinstance(password, str):
                response = requests.post('https://agira.desy.de/rest/api/latest/issue', auth=(data['user'], password),
                                         json={'fields': issue})
            else:
                fields = {'issue': json.dumps(issue)}
                if 'user' in data.keys():
                    fields['user'] = data['user']
                # for oauth access the token and secret are passed along
                fields['token'] = password[0]
                fields['secret'] = password[1]
                response = requests.post('https://b2-master.belle2.org/cgi-bin/jira_issue.py', data=fields)
            if response.status_code in range(200, 210):
                B2INFO(f"Issue successfully created: https://agira.desy.de/browse/{response.json()['key']}")
            else:
                B2ERROR('The creation of the issue failed: ' + requests.status_codes._codes[response.status_code][0])
                return False

        # comment on an existing issue
        else:
            # if the configuration for new issues defines an assignee, add them as watcher
            new_issue_config = jira_global_tag_v2(data['task'])
            if isinstance(new_issue_config, dict) and "assignee" in new_issue_config:
                user = new_issue_config['assignee'].get('name', None)
                if user is not None and isinstance(password, str):
                    response = requests.post(f'https://agira.desy.de/rest/api/latest/issue/{issue}/watchers',
                                             auth=(data['user'], password), json=user)
                    if response.status_code in range(200, 210):
                        B2INFO(f"Added {user} as watcher to {issue}")
                    else:
                        B2WARNING(f"Could not add {user} as watcher to {issue}: {response.status_code}")

            B2INFO(f"Commenting on jira issue {issue} for {data['task']} globaltag request")
            if isinstance(password, str):
                response = requests.post('https://agira.desy.de/rest/api/latest/issue/%s/comment' % issue,
                                         auth=(data['user'], password), json={'body': description})
            else:
                fields = {'id': issue, 'user': user, 'comment': description}
                # for oauth access the token and secret are passed along
                fields['token'] = password[0]
                fields['secret'] = password[1]
                response = requests.post('https://b2-master.belle2.org/cgi-bin/jira_issue.py', data=fields)
            if response.status_code in range(200, 210):
                B2INFO(f"Issue successfully updated: https://agira.desy.de/browse/{issue}")
            else:
                B2ERROR('The commenting of the issue failed: ' + requests.status_codes._codes[response.status_code][0])
                return False

        return True

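
# Illustrative sketch (not part of the original module): typical read-only use
# of the ConditionsDB class. The globaltag name is a placeholder.
def _example_conditionsdb_usage():
    db = ConditionsDB()                 # connects to the first working server
    tags = db.get_globalTags()          # dictionary: name -> tag information
    if tags:
        print(f"{len(tags)} globaltags found")
    for info in db.get_all_iovs("some_globaltag"):
        print(info.name, info.revision, info.iov_str())
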
def require_database_for_test(timeout=60, base_url=None):
    """Make sure that the database is available and skip the test if not.

    This function should be called in test scripts if they are expected to fail
    if the database is down. It either returns when the database is ok or it
    will signal test_basf2 that the test should be skipped and exit
    """
    if os.environ.get("BELLE2_CONDB_GLOBALTAG", None) == "":
        raise Exception("Access to the Database is disabled")
    base_url_list = ConditionsDB.get_base_urls(base_url)
    for url in base_url_list:
        try:
            req = requests.request("HEAD", url.rstrip('/') + "/v2/globalTags")
            req.raise_for_status()
        except requests.RequestException as e:
            B2WARNING(f"Problem connecting to {url}:\n {e}\n Trying next server ...")
        else:
            break
    else:
        print("TEST SKIPPED: No working database servers configured, giving up", file=sys.stderr)
        sys.exit(1)

def enable_debugging():
    """Enable verbose output of python-requests to be able to debug http connections"""
    # enable debugging at httplib level (requests -> urllib3 -> http.client):
    # the REQUEST including headers and data and the RESPONSE headers will be printed
    import http.client as http_client
    import logging
    http_client.HTTPConnection.debuglevel = 1
    # initialize logging, otherwise no debug output will be visible
    logging.basicConfig()
    logging.getLogger().setLevel(logging.DEBUG)
    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True

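
# Usage sketch (illustrative): enable_debugging() is meant to be called once at
# the start of a script, before any requests are made:
#
#   enable_debugging()
#   db = ConditionsDB()
#   db.get_globalTags()   # full http request/response headers are now logged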