# Belle II Software development
# framework.py
#!/usr/bin/env python3
"""
This module implements several objects/functions to configure and run calibrations.
These classes are used to construct the workflow of the calibration job.
The actual processing code is mostly in the `caf.state_machines` module.
"""
16
17__all__ = ["CalibrationBase", "Calibration", "Algorithm", "CAF"]
18
19import os
20from threading import Thread
21from time import sleep
22from pathlib import Path
23import shutil
24from glob import glob
25
26from basf2 import B2ERROR, B2WARNING, B2INFO, B2FATAL, B2DEBUG
27from basf2 import find_file
28from basf2 import conditions as b2conditions
29
30from abc import ABC, abstractmethod
31
32import caf
33from caf.utils import B2INFO_MULTILINE
34from caf.utils import past_from_future_dependencies
35from caf.utils import topological_sort
36from caf.utils import all_dependencies
37from caf.utils import method_dispatch
38from caf.utils import temporary_workdir
39from caf.utils import find_int_dirs
40from caf.utils import LocalDatabase
41from caf.utils import CentralDatabase
42from caf.utils import parse_file_uri
43
44import caf.strategies as strategies
45import caf.runners as runners
46from caf.backends import MaxSubjobsSplitter, MaxFilesSplitter
47from caf.state_machines import CalibrationMachine, ConditionError, MachineError
48from caf.database import CAFDB
49
50
class Collection():
    """
    A configuration unit pairing one collector module with its input files, database chain
    and job-submission settings. A `Calibration` owns one or more of these.

    Keyword Arguments:
        collector (str, basf2.Module): The collector module or module name for this `Collection`.
        input_files (list[str]): The input files to be used for only this `Collection`.
        pre_collector_path (basf2.Path): The reconstruction `basf2.Path` to be run prior to the Collector module.
        database_chain (list[CentralDatabase, LocalDatabase]): The database chain to be used initially for this `Collection`.
        output_patterns (list[str]): Output patterns of files produced by collector which will be used to pass to the
            `Algorithm.data_input` function. Setting this here, replaces the default completely.
        max_files_per_collector_job (int): Maximum number of input files sent to each collector subjob for this `Collection`.
            Technically this sets the SubjobSplitter to be used, not compatible with max_collector_jobs.
        max_collector_jobs (int): Maximum number of collector subjobs for this `Collection`.
            Input files are split evenly between them. Technically this sets the SubjobSplitter to be used. Not compatible with
            max_files_per_collector_job.
        backend_args (dict): The args for the backend submission of this `Collection`.
    """

    #: Default maximum number of collector subjobs, used when neither splitter option is given.
    default_max_collector_jobs = 1000

    #: File name for the JSON job configuration of a collector job.
    job_config = "collector_job.json"

    def __init__(self,
                 collector=None,
                 input_files=None,
                 pre_collector_path=None,
                 database_chain=None,
                 output_patterns=None,
                 max_files_per_collector_job=None,
                 max_collector_jobs=None,
                 backend_args=None
                 ):
        """
        Set up the collection; see the class docstring for the meaning of each argument.
        """
        #: Collector module (or its name) run by this collection
        self.collector = collector

        #: Resolved URIs of the input files for only this collection
        self.input_files = []
        if input_files:
            self.input_files = input_files

        #: Mapping of input file paths to the IoVs they contain (filled in externally)
        self.files_to_iovs = {}

        #: Optional basf2 path run before the collector module
        self.pre_collector_path = None
        if pre_collector_path:
            self.pre_collector_path = pre_collector_path

        #: Output patterns of collector files passed on to `Algorithm.data_input`
        self.output_patterns = ["CollectorOutput.root"]
        if output_patterns:
            self.output_patterns = output_patterns

        #: The SubjobSplitter used to chunk this collection's input files into collector subjobs
        self.splitter = None
        if max_files_per_collector_job and max_collector_jobs:
            B2FATAL("Cannot set both 'max_files_per_collector_job' and 'max_collector_jobs' of a collection!")
        # \cond false positive doxygen warning
        elif max_files_per_collector_job:
            self.max_files_per_collector_job = max_files_per_collector_job
        elif max_collector_jobs:
            self.max_collector_jobs = max_collector_jobs
        else:
            # Neither option requested: fall back to the class default number of subjobs.
            self.max_collector_jobs = self.default_max_collector_jobs
        # \endcond

        #: Extra arguments for the backend submission of this collection's jobs
        self.backend_args = {}
        if backend_args:
            self.backend_args = backend_args

        if database_chain:
            #: The database chain used for this collection's collector jobs
            self.database_chain = database_chain
        else:
            self.database_chain = []
            # This may seem weird but the changes to the DB interface mean that they have effectively swapped from being
            # described well by appending to a list to a deque. So we do bit of reversal to translate it back and make the
            # most important GT the last one encountered.
            for tag in reversed(b2conditions.default_globaltags):
                self.use_central_database(tag)

        #: The basf2 steering file that will be used for Collector jobs run by this collection.
        #: This script will be copied into subjob directories as part of the input sandbox.
        self.job_script = Path(find_file("calibration/scripts/caf/run_collector_path.py")).absolute()

        #: The command used to start the collector job script
        self.job_cmd = ["basf2", self.job_script.name, "--job-information job_info.json"]

    def reset_database(self):
        """
        Remove everything in the database_chain of this Calibration, including the default central database
        tag automatically included from `basf2.conditions.default_globaltags <ConditionsConfiguration.default_globaltags>`.
        """
        self.database_chain = []

    def use_central_database(self, global_tag):
        """
        Parameters:
            global_tag (str): The central database global tag to use for this calibration.

        Using this allows you to add a central database to the head of the global tag database chain for this collection.
        The default database chain is just the central one from
        `basf2.conditions.default_globaltags <ConditionsConfiguration.default_globaltags>`.
        The input file global tag will always be overridden and never used unless explicitly set.

        To turn off central database completely or use a custom tag as the base, you should call `Calibration.reset_database`
        and start adding databases with `Calibration.use_local_database` and `Calibration.use_central_database`.

        Alternatively you could set an empty list as the input database_chain when adding the Collection to the Calibration.

        NOTE!! Since ``release-04-00-00`` the behaviour of basf2 conditions databases has changed.
        All local database files MUST now be at the head of the 'chain', with all central database global tags in their own
        list which will be checked after all local database files have been checked.

        So even if you ask for ``["global_tag1", "localdb/database.txt", "global_tag2"]`` to be the database chain, the real
        order that basf2 will use them is ``["global_tag1", "global_tag2", "localdb/database.txt"]`` where the file is
        checked first.
        """
        central_db = CentralDatabase(global_tag)
        self.database_chain.append(central_db)

    def use_local_database(self, filename, directory=""):
        """
        Parameters:
            filename (str): The path to the database.txt of the local database
            directory (str): The path to the payloads directory for this local database.

        Append a local database to the chain for this collection.
        You can call this function multiple times and each database will be added to the chain IN ORDER.
        The databases are applied to this collection ONLY.

        NOTE!! Since release-04-00-00 the behaviour of basf2 conditions databases has changed.
        All local database files MUST now be at the head of the 'chain', with all central database global tags in their own
        list which will be checked after all local database files have been checked.

        So even if you ask for ["global_tag1", "localdb/database.txt", "global_tag2"] to be the database chain, the real order
        that basf2 will use them is ["global_tag1", "global_tag2", "localdb/database.txt"] where the file is checked first.
        """
        local_db = LocalDatabase(filename, directory)
        self.database_chain.append(local_db)

    @staticmethod
    def uri_list_from_input_file(input_file):
        """
        Parameters:
            input_file (str): A local file/glob pattern or XROOTD URI

        Returns:
            list: A list of the URIs found from the initial string.
        """
        # By default we assume it is a local file path if no "scheme" is given
        uri = parse_file_uri(input_file)
        if uri.scheme == "file":
            # For local files we also perform a glob just in case it is a wildcard pattern.
            # That way we will have all the uris of files separately
            uris = [parse_file_uri(f).geturl() for f in glob(input_file)]
        else:
            # Just let everything else through and hope the backend can figure it out
            uris = [input_file]
        return uris

    @property
    def input_files(self):
        """list: The resolved input file URIs of this collection."""
        return self._input_files

    @input_files.setter
    def input_files(self, value):
        """
        Accepts either a single string (glob pattern/URI) or a list of them; always stores a flat list of URIs.
        """
        if isinstance(value, str):
            # If it's a string, we convert to a list of URIs
            self._input_files = self.uri_list_from_input_file(value)
        elif isinstance(value, list):
            # If it's a list we loop and do the same thing
            total_files = []
            for pattern in value:
                total_files.extend(self.uri_list_from_input_file(pattern))
            self._input_files = total_files
        else:
            raise TypeError("Input files must be a list or string")

    @property
    def collector(self):
        """basf2.Module: The collector module of this collection."""
        return self._collector

    @collector.setter
    def collector(self, collector):
        """
        Accepts either a `basf2.Module` or the registered name of one; None is stored unchanged.
        """
        # check if collector is already a module or if we need to create one
        # from the name
        if collector:
            from basf2 import Module
            if isinstance(collector, str):
                from basf2 import register_module
                collector = register_module(collector)
            if not isinstance(collector, Module):
                B2ERROR("Collector needs to be either a Module or the name of such a module")
        self._collector = collector

    def is_valid(self):
        """
        Returns:
            bool: True only if both a collector and input files have been set.
        """
        if (not self.collector or not self.input_files):
            return False
        else:
            return True

    @property
    def max_collector_jobs(self):
        """int: The maximum number of collector subjobs (None if no subjob-count splitter is active)."""
        if self.splitter:
            return self.splitter.max_subjobs
        else:
            return None

    @max_collector_jobs.setter
    def max_collector_jobs(self, value):
        """Replace the splitter with a MaxSubjobsSplitter (or clear it when value is None)."""
        if value is None:
            self.splitter = None
        else:
            self.splitter = MaxSubjobsSplitter(max_subjobs=value)

    @property
    def max_files_per_collector_job(self):
        """int: The maximum number of input files per collector subjob (None if no per-file splitter is active)."""
        if self.splitter:
            return self.splitter.max_files_per_subjob
        else:
            return None

    @max_files_per_collector_job.setter
    def max_files_per_collector_job(self, value):
        """Replace the splitter with a MaxFilesSplitter (or clear it when value is None)."""
        if value is None:
            self.splitter = None
        else:
            self.splitter = MaxFilesSplitter(max_files_per_subjob=value)
315
316
class CalibrationBase(ABC, Thread):
    """
    Abstract base class of Calibration types. The CAF implements the :py:class:`Calibration` class which inherits from
    this and runs the C++ CalibrationCollectorModule and CalibrationAlgorithm classes. But by inheriting from this
    class and providing the minimal necessary methods/attributes you could plug in your own Calibration types
    that doesn't depend on the C++ CAF at all and run everything in your own way.

    .. warning:: Writing your own class inheriting from :py:class:`CalibrationBase` class is not recommended!
                 But it's there if you really need it.

    Parameters:
        name (str): Name of this calibration object. Should be unique if you are going to run it.

    Keyword Arguments:
        input_files (list[str]): Input files for this calibration. May contain wildcard expressions usable by `glob.glob`.
    """

    #: The state a calibration must reach for its dependents to be allowed to run.
    end_state = "completed"

    #: The state a calibration reaches when it has failed.
    fail_state = "failed"

    def __init__(self, name, input_files=None):
        """
        Initialise the thread and the dependency bookkeeping for this calibration.
        """
        super().__init__()
        #: Name of this calibration; used as a unique identifier within the CAF.
        self.name = name
        #: Calibrations that depend on this one (appended to by their `depends_on` calls).
        self.future_dependencies = []
        #: Calibrations that this one depends on.
        self.dependencies = []
        #: Input files for this calibration.
        if input_files:
            self.input_files = input_files
        else:
            self.input_files = []
        #: IoV to be calibrated; None means it is derived from the input data.
        self.iov = None
        #: Whether the payloads produced by this calibration should be kept.
        self.save_payloads = True

    @abstractmethod
    def run(self):
        """
        The most important method. Runs inside a new Thread and is called from `CalibrationBase.start`
        once the dependencies of this `CalibrationBase` have returned with state == end_state i.e. "completed".
        """

    @abstractmethod
    def is_valid(self):
        """
        A simple method you should implement that will return True or False depending on whether
        the Calibration has been set up correctly and can be run safely.
        """

    def depends_on(self, calibration):
        """
        Parameters:
            calibration (`CalibrationBase`): The Calibration object which will produce constants that this one depends on.

        Adds dependency of this calibration on another i.e. This calibration
        will not run until the dependency has completed, and the constants produced
        will be used via the database chain.

        You can define multiple dependencies for a single calibration simply
        by calling this multiple times. Be careful when adding the calibration into
        the `CAF` not to add a circular/cyclic dependency. If you do the sort will return an
        empty order and the `CAF` processing will fail.

        This function appends to the `CalibrationBase.dependencies` and `CalibrationBase.future_dependencies` attributes of this
        `CalibrationBase` and the input one respectively. This prevents us having to do too much recalculation later on.
        """
        # Check that we don't have two calibration names that are the same
        if self.name != calibration.name:
            # Tests if we have the calibrations added as dependencies already and adds if not
            if calibration not in self.dependencies:
                self.dependencies.append(calibration)
            # BUGFIX: guard on the list we actually append to (future_dependencies, not dependencies),
            # otherwise repeated calls create duplicate entries.
            if self not in calibration.future_dependencies:
                calibration.future_dependencies.append(self)
        else:
            B2WARNING(f"Tried to add {calibration} as a dependency for {self} but they have the same name. "
                      "Dependency was not added.")

    def dependencies_met(self):
        """
        Checks if all of the Calibrations that this one depends on have reached a successful end state.
        """
        return all(map(lambda x: x.state == x.end_state, self.dependencies))

    def failed_dependencies(self):
        """
        Returns the list of calibrations in our dependency list that have failed.
        """
        failed = []
        for calibration in self.dependencies:
            if calibration.state == self.fail_state:
                failed.append(calibration)
        return failed

    def _apply_calibration_defaults(self, defaults):
        """
        We pass in default calibration options from the `CAF` instance here if called.
        Won't overwrite any options already set.
        """
        for key, value in defaults.items():
            try:
                if getattr(self, key) is None:
                    setattr(self, key, value)
            except AttributeError:
                print(f"The calibration {self.name} does not support the attribute {key}.")
442
443
class Calibration(CalibrationBase):
    """
    Every Calibration object must have at least one collector at least one algorithm.
    You have the option to add in your collector/algorithm by argument here, or add them
    later by changing the properties.

    If you plan to use multiple `Collection` objects I recommend that you only set the name here and add the Collections
    separately via `add_collection()`.

    Parameters:
        name (str): Name of this calibration. It should be unique for use in the `CAF`
    Keyword Arguments:
        collector (str, `basf2.Module`): Should be set to a CalibrationCollectorModule() or a string with the module name.
        algorithms (list, ``ROOT.Belle2.CalibrationAlgorithm``): The algorithm(s) to use for this `Calibration`.
        input_files (str, list[str]): Input files for use by this Calibration. May contain wildcards usable by `glob.glob`

    A Calibration won't be valid in the `CAF` until it has all of these four attributes set. For example:

    >>> cal = Calibration('TestCalibration1')
    >>> col1 = register_module('CaTest')
    >>> cal.add_collection('TestColl', col1)

    or equivalently

    >>> cal = Calibration('TestCalibration1', 'CaTest')

    If you want to run a basf2 :py:class:`path <basf2.Path>` before your collector module when running over data

    >>> cal.pre_collector_path = my_basf2_path

    You don't have to put a RootInput module in this pre-collection path, but you can if
    you need some special parameters. If you want to process sroot files then you have to explicitly add
    SeqRootInput to your pre-collection path.
    The inputFileNames parameter of (Seq)RootInput will be set by the CAF automatically for you.

    You can use optional arguments to pass in some/all during initialisation of the `Calibration` class

    >>> cal = Calibration('TestCalibration1', 'CaTest', [alg1, alg2], ['/path/to/file.root'])

    you can change the input file list later on, before running with `CAF`

    >>> cal.input_files = ['path/to/*.root', 'other/path/to/file2.root']

    If you have multiple collections from calling `add_collection()` then you should instead set the pre_collector_path,
    input_files, database chain etc from there. See `Collection`.

    Adding the CalibrationAlgorithm(s) is easy

    >>> alg1 = TestAlgo()
    >>> cal.algorithms = alg1

    Or equivalently

    >>> cal.algorithms = [alg1]

    Or for multiple algorithms for one collector

    >>> alg2 = TestAlgo()
    >>> cal.algorithms = [alg1, alg2]

    Note that when you set the algorithms, they are automatically wrapped and stored as a Python class
    `Algorithm`. To access the C++ algorithm class underneath directly do:

    >>> cal.algorithms[i].algorithm

    If you have a setup function that you want to run before each of the algorithms, set that with

    >>> cal.pre_algorithms = my_function_object

    If you want a different setup for each algorithm use a list with the same number of elements
    as your algorithm list.

    >>> cal.pre_algorithms = [my_function1, my_function2, ...]

    You can also specify the dependencies of the calibration on others

    >>> cal.depends_on(cal2)

    By doing this, the `CAF` will respect the ordering of the calibrations and will pass the
    calibration constants created by earlier completed calibrations to dependent ones.
    """

    #: Allowed transitions used to progress the underlying `CalibrationMachine`.
    moves = ["submit_collector", "complete", "run_algorithms", "iterate", "fail_fully"]

    #: Subdirectory name into which algorithm output is placed.
    alg_output_dir = "algorithm_output"

    #: States at which progress is checkpointed and from which a restart is possible.
    checkpoint_states = ["init", "collector_completed", "completed"]

    #: Name of the automatically-created default `Collection`.
    default_collection_name = "default"
534
535 def __init__(self,
536 name,
537 collector=None,
538 algorithms=None,
539 input_files=None,
540 pre_collector_path=None,
541 database_chain=None,
542 output_patterns=None,
543 max_files_per_collector_job=None,
544 max_collector_jobs=None,
545 backend_args=None
546 ):
547 """
548 """
549
550 self.collections = {}
551
552 self._algorithms = []
553
554 # Default collection added, will have None type and requires setting later via `self.collector`, or will take the
555 # CollectorModule/module name directly.
557 Collection(collector,
558 input_files,
559 pre_collector_path,
560 database_chain,
561 output_patterns,
562 max_files_per_collector_job,
563 max_collector_jobs,
564 backend_args
565 ))
566
567 super().__init__(name, input_files)
568 if algorithms:
569
572 self.algorithms = algorithms
573
574 self.results = {}
575
577 self.max_iterations = None
578
582 self.ignored_runs = None
583 if self.algorithms:
584
588 if database_chain:
589
591 self.database_chain = database_chain
592 else:
593 self.database_chain = []
594 # This database is already applied to the `Collection` automatically, so don't do it again
595 for tag in reversed(b2conditions.default_globaltags):
596 self.use_central_database(tag, apply_to_default_collection=False)
597
601
603 self.backend = None
604
609
610 self.heartbeat = 3
611
612 self.machine = None
613
614 self._db_path = None
615
616 def add_collection(self, name, collection):
617 """
618 Parameters:
619 name (str): Unique name of this `Collection` in the Calibration.
620 collection (`Collection`): `Collection` object to use.
621
622 Adds a new `Collection` object to the `Calibration`. Any valid Collection will be used in the Calibration.
623 A default Collection is automatically added but isn't valid and won't run unless you have assigned a collector
624 + input files.
625 You can ignore the default one and only add your own custom Collections. You can configure the default from the
626 Calibration(...) arguments or after creating the Calibration object via directly setting the cal.collector, cal.input_files
627 attributes.
628 """
629 if name not in self.collections:
630 self.collections[name] = collection
631 else:
632 B2WARNING(f"A Collection with the name '{name}' already exists in this Calibration. It has not been added."
633 "Please use another name.")
634
635 def is_valid(self):
636 """
637 A full calibration consists of a collector AND an associated algorithm AND input_files.
638
639 Returns False if:
640 1) We are missing any of the above.
641 2) There are multiple Collections and the Collectors have mis-matched granularities.
642 3) Any of our Collectors have granularities that don't match what our Strategy can use.
643 """
644 if not self.algorithms:
645 B2WARNING(f"Empty algorithm list for {self.name}.")
646 return False
647
648 if not any([collection.is_valid() for collection in self.collections.values()]):
649 B2WARNING(f"No valid Collections for {self.name}.")
650 return False
651
652 granularities = []
653 for collection in self.collections.values():
654 if collection.is_valid():
655 collector_params = collection.collector.available_params()
656 for param in collector_params:
657 if param.name == "granularity":
658 granularities.append(param.values)
659 if len(set(granularities)) > 1:
660 B2WARNING("Multiple different granularities set for the Collections in this Calibration.")
661 return False
662
663 for alg in self.algorithms:
664 alg_type = type(alg.algorithm).__name__
665 incorrect_gran = [granularity not in alg.strategy.allowed_granularities for granularity in granularities]
666 if any(incorrect_gran):
667 B2WARNING(f"Selected strategy for {alg_type} does not match a collector's granularity.")
668 return False
669 return True
670
671 def reset_database(self, apply_to_default_collection=True):
672 """
673 Keyword Arguments:
674 apply_to_default_collection (bool): Should we also reset the default collection?
675
676 Remove everything in the database_chain of this Calibration, including the default central database tag automatically
677 included from `basf2.conditions.default_globaltags <ConditionsConfiguration.default_globaltags>`. This will NOT affect the
678 database chain of any `Collection` other than the default one. You can prevent the default Collection from having its chain
679 reset by setting 'apply_to_default_collection' to False.
680 """
681 self.database_chain = []
682 if self.default_collection_name in self.collections and apply_to_default_collection:
684
685 def use_central_database(self, global_tag, apply_to_default_collection=True):
686 """
687 Parameters:
688 global_tag (str): The central database global tag to use for this calibration.
689
690 Keyword Arguments:
691 apply_to_default_collection (bool): Should we also call use_central_database on the default collection (if it exists)
692
693 Using this allows you to append a central database to the database chain for this calibration.
694 The default database chain is just the central one from
695 `basf2.conditions.default_globaltags <ConditionsConfiguration.default_globaltags>`.
696 To turn off central database completely or use a custom tag as the base, you should call `Calibration.reset_database`
697 and start adding databases with `Calibration.use_local_database` and `Calibration.use_central_database`.
698
699 Note that the database chain attached to the `Calibration` will only affect the default `Collection` (if it exists),
700 and the algorithm processes. So calling:
701
702 >> cal.use_central_database("global_tag")
703
704 will modify the database chain used by all the algorithms assigned to this `Calibration`, and modifies the database chain
705 assigned to
706
707 >> cal.collections['default'].database_chain
708
709 But calling
710
711 >> cal.use_central_database(file_path, payload_dir, False)
712
713 will add the database to the Algorithm processes, but leave the default Collection database chain untouched.
714 So if you have multiple Collections in this Calibration *their database chains are separate*.
715 To specify an additional `CentralDatabase` for a different collection, you will have to call:
716
717 >> cal.collections['OtherCollection'].use_central_database("global_tag")
718 """
719 central_db = CentralDatabase(global_tag)
720 self.database_chain.append(central_db)
721 if self.default_collection_name in self.collections and apply_to_default_collection:
723
724 def use_local_database(self, filename, directory="", apply_to_default_collection=True):
725 """
726 Parameters:
727 filename (str): The path to the database.txt of the local database
728
729 Keyword Argumemts:
730 directory (str): The path to the payloads directory for this local database.
731 apply_to_default_collection (bool): Should we also call use_local_database on the default collection (if it exists)
732
733 Append a local database to the chain for this calibration.
734 You can call this function multiple times and each database will be added to the chain IN ORDER.
735 The databases are applied to this calibration ONLY.
736 The Local and Central databases applied via these functions are applied to the algorithm processes and optionally
737 the default `Collection` job as a database chain.
738 There are other databases applied to the processes later, checked by basf2 in this order:
739
740 1) Local Database from previous iteration of this Calibration.
741 2) Local Database chain from output of previous dependent Calibrations.
742 3) This chain of Local and Central databases where the last added is checked first.
743
744 Note that this function on the `Calibration` object will only affect the default `Collection` if it exists and if
745 'apply_to_default_collection' remains True. So calling:
746
747 >> cal.use_local_database(file_path, payload_dir)
748
749 will modify the database chain used by all the algorithms assigned to this `Calibration`, and modifies the database chain
750 assigned to
751
752 >> cal.collections['default'].database_chain
753
754 But calling
755
756 >> cal.use_local_database(file_path, payload_dir, False)
757
758 will add the database to the Algorithm processes, but leave the default Collection database chain untouched.
759
760 If you have multiple Collections in this Calibration *their database chains are separate*.
761 To specify an additional `LocalDatabase` for a different collection, you will have to call:
762
763 >> cal.collections['OtherCollection'].use_local_database(file_path, payload_dir)
764
765 """
766 local_db = LocalDatabase(filename, directory)
767 self.database_chain.append(local_db)
768 if self.default_collection_name in self.collections and apply_to_default_collection:
769 self.collections[self.default_collection_name].use_local_database(filename, directory)
770
772 """
773 """
774 if self.default_collection_name in self.collections:
775 return getattr(self.collections[self.default_collection_name], attr)
776 else:
777 B2WARNING(f"You tried to get the attribute '{attr}' from the Calibration '{self.name}', "
778 "but the default collection doesn't exist."
779 f"You should use the cal.collections['CollectionName'].{attr} to access a custom "
780 "collection's attributes directly.")
781 return None
782
783 def _set_default_collection_attribute(self, attr, value):
784 """
785 """
786 if self.default_collection_name in self.collections:
787 setattr(self.collections[self.default_collection_name], attr, value)
788 else:
789 B2WARNING(f"You tried to set the attribute '{attr}' from the Calibration '{self.name}', "
790 "but the default collection doesn't exist."
791 f"You should use the cal.collections['CollectionName'].{attr} to access a custom "
792 "collection's attributes directly.")
793
794 @property
795 def collector(self):
796 """
797 """
798 return self._get_default_collection_attribute("collector")
799
800 @collector.setter
801 def collector(self, collector):
802 """
803 """
804 # check if collector is already a module or if we need to create one
805 # from the name
806 from basf2 import Module
807 if isinstance(collector, str):
808 from basf2 import register_module
809 collector = register_module(collector)
810 if not isinstance(collector, Module):
811 B2ERROR("Collector needs to be either a Module or the name of such a module")
812
813 self._set_default_collection_attribute("collector", collector)
814
815 @property
816 def input_files(self):
817 """
818 """
819 return self._get_default_collection_attribute("input_files")
820
821 @input_files.setter
822 def input_files(self, files):
823 """
824 """
825 self._set_default_collection_attribute("input_files", files)
826
827 @property
828 def files_to_iovs(self):
829 """
830 """
831 return self._get_default_collection_attribute("files_to_iovs")
832
833 @files_to_iovs.setter
834 def files_to_iovs(self, file_map):
835 """
836 """
837 self._set_default_collection_attribute("files_to_iovs", file_map)
838
839 @property
841 """
842 """
843 return self._get_default_collection_attribute("pre_collector_path")
844
845 @pre_collector_path.setter
846 def pre_collector_path(self, path):
847 """
848 """
849 self._set_default_collection_attribute("pre_collector_path", path)
850
851 @property
853 """
854 """
855 return self._get_default_collection_attribute("output_patterns")
856
857 @output_patterns.setter
858 def output_patterns(self, patterns):
859 """
860 """
861 self._set_default_collection_attribute("output_patterns", patterns)
862
863 @property
865 """
866 """
867 return self._get_default_collection_attribute("max_files_per_collector_job")
868
869 @max_files_per_collector_job.setter
870 def max_files_per_collector_job(self, max_files):
871 """
872 """
873 self._set_default_collection_attribute("max_files_per_collector_job", max_files)
874
875 @property
877 """
878 """
879 return self._get_default_collection_attribute("max_collector_jobs")
880
881 @max_collector_jobs.setter
882 def max_collector_jobs(self, max_jobs):
883 """
884 """
885 self._set_default_collection_attribute("max_collector_jobs", max_jobs)
886
887 @property
888 def backend_args(self):
889 """
890 """
891 return self._get_default_collection_attribute("backend_args")
892
893 @backend_args.setter
894 def backend_args(self, args):
895 """
896 """
897 self._set_default_collection_attribute("backend_args", args)
898
899 @property
900 def algorithms(self):
901 """
902 """
903 return self._algorithms
904
905 @algorithms.setter
906 @method_dispatch
907 def algorithms(self, value):
908 """
909 """
910 from ROOT import Belle2 # noqa: make the Belle2 namespace available
911 from ROOT.Belle2 import CalibrationAlgorithm
912 if isinstance(value, CalibrationAlgorithm):
913 self._algorithms = [Algorithm(value)]
914 else:
915 B2ERROR(f"Something other than CalibrationAlgorithm instance passed in ({type(value)}). "
916 "Algorithm needs to inherit from Belle2::CalibrationAlgorithm")
917
918 @algorithms.fset.register(tuple)
919 @algorithms.fset.register(list)
920 def _(self, value):
921 """
922 Alternate algorithms setter for lists and tuples of CalibrationAlgorithms.
923 """
924 from ROOT import Belle2 # noqa: make the Belle2 namespace available
925 from ROOT.Belle2 import CalibrationAlgorithm
926 if value:
927 self._algorithms = []
928 for alg in value:
929 if isinstance(alg, CalibrationAlgorithm):
930 self._algorithms.append(Algorithm(alg))
931 else:
932 B2ERROR(f"Something other than CalibrationAlgorithm instance passed in {type(value)}."
933 "Algorithm needs to inherit from Belle2::CalibrationAlgorithm")
934
935 @property
936 def pre_algorithms(self):
937 """
938 Callback run prior to each algorithm iteration.
939 """
940 return [alg.pre_algorithm for alg in self.algorithms]
941
942 @pre_algorithms.setter
943 @method_dispatch
944 def pre_algorithms(self, func):
945 """
946 """
947 if func:
948 for alg in self.algorithms:
949 alg.pre_algorithm = func
950 else:
951 B2ERROR("Something evaluated as False passed in as pre_algorithm function.")
952
953 @pre_algorithms.fset.register(tuple)
954 @pre_algorithms.fset.register(list)
955 def _(self, values):
956 """
957 Alternate pre_algorithms setter for lists and tuples of functions, should be one per algorithm.
958 """
959 if values:
960 if len(values) == len(self.algorithms):
961 for func, alg in zip(values, self.algorithms):
962 alg.pre_algorithm = func
963 else:
964 B2ERROR("Number of functions and number of algorithms doesn't match.")
965 else:
966 B2ERROR("Empty container passed in for pre_algorithm functions")
967
    @property
    def strategies(self):
        """
        The `caf.strategies.AlgorithmStrategy` or `list` of them used when running the algorithm(s).
        """
        # One strategy per algorithm, in the same order as `self.algorithms`.
        return [alg.strategy for alg in self.algorithms]
974
975 @strategies.setter
976 @method_dispatch
977 def strategies(self, strategy):
978 """
979 """
980 if strategy:
981 for alg in self.algorithms:
982 alg.strategy = strategy
983 else:
984 B2ERROR("Something evaluated as False passed in as a strategy.")
985
986 @strategies.fset.register(tuple)
987 @strategies.fset.register(list)
988 def _(self, values):
989 """
990 Alternate strategies setter for lists and tuples of functions, should be one per algorithm.
991 """
992 if values:
993 if len(values) == len(self.algorithms):
994 for strategy, alg in zip(strategies, self.algorithms):
995 alg.strategy = strategy
996 else:
997 B2ERROR("Number of strategies and number of algorithms doesn't match.")
998 else:
999 B2ERROR("Empty container passed in for strategies list")
1000
    def __repr__(self):
        """
        Represents the calibration by its name.
        """
        return self.name
1005
    def run(self):
        """
        Main logic of the Calibration object.
        Will be run in a new Thread by calling the start() method.
        """
        # Recover the last saved checkpoint state/iteration so a restarted CAF can
        # resume this calibration from where it left off.
        with CAFDB(self._db_path, read_only=True) as db:
            initial_state = db.get_calibration_value(self.name, "checkpoint")
            initial_iteration = db.get_calibration_value(self.name, "iteration")
        B2INFO(f"Initial status of {self.name} found to be state={initial_state}, iteration={initial_iteration}")
        self.machine = CalibrationMachine(self,
                                          iov_to_calibrate=self.iov,
                                          initial_state=initial_state,
                                          iteration=initial_iteration)
        self.state = initial_state
        self.machine.root_dir = Path(os.getcwd(), self.name)
        self.machine.collector_backend = self.backend

        # Before we start running, let's clean up any iteration directories from iterations above our initial one.
        # Should prevent confusion between attempts if we fail again.
        all_iteration_paths = find_int_dirs(self.machine.root_dir)
        for iteration_path in all_iteration_paths:
            if int(iteration_path.name) > initial_iteration:
                shutil.rmtree(iteration_path)

        # Loop until we reach a terminal (success or failure) state.
        while self.state != self.end_state and self.state != self.fail_state:
            if self.state == "init":
                try:
                    B2INFO(f"Attempting collector submission for calibration {self.name}.")
                    # NOTE(review): the actual collector submission call appears to be
                    # missing from this view of the file — confirm against the full source.
                except Exception as err:
                    B2FATAL(str(err))

                self._poll_collector()

            # If we failed take us to the final fail state
            if self.state == "collector_failed":
                self.machine.fail_fully()
                return

            # It's possible that we might raise an error while attempting to run due
            # to some problems e.g. Missing collector output files
            # We catch the error and exit with failed state so the CAF will stop
            try:
                B2INFO(f"Attempting to run algorithms for calibration {self.name}.")
                self.machine.run_algorithms()
            except MachineError as err:
                B2ERROR(str(err))
                self.machine.fail()

            # If we failed take us to the final fail state
            if self.machine.state == "algorithms_failed":
                self.machine.fail_fully()
                return
1060
1062 """
1063 """
1064 while self.state == "running_collector":
1065 try:
1066 self.machine.complete()
1067 # ConditionError is thrown when the conditions for the transition have returned false, it's not serious.
1068 except ConditionError:
1069 try:
1070 B2DEBUG(29, f"Checking if collector jobs for calibration {self.name} have failed.")
1071 self.machine.fail()
1072 except ConditionError:
1073 pass
1074 sleep(self.heartbeat) # Sleep until we want to check again
1075
1076 @property
1077 def state(self):
1078 """
1079 The current major state of the calibration in the database file. The machine may have a different state.
1080 """
1081 with CAFDB(self._db_path, read_only=True) as db:
1082 state = db.get_calibration_value(self.name, "state")
1083 return state
1084
1085 @state.setter
1086 def state(self, state):
1087 """
1088 """
1089 B2DEBUG(29, f"Setting {self.name} to state {state}.")
1090 with CAFDB(self._db_path) as db:
1091 db.update_calibration_value(self.name, "state", str(state))
1092 if state in self.checkpoint_states:
1093 db.update_calibration_value(self.name, "checkpoint", str(state))
1094 B2DEBUG(29, f"{self.name} set to {state}.")
1095
    @property
    def iteration(self):
        """
        Retrieves the current iteration number in the database file.

        Returns:
            int: The current iteration number
        """
        with CAFDB(self._db_path, read_only=True) as db:
            iteration = db.get_calibration_value(self.name, "iteration")
        return iteration
1107
    @iteration.setter
    def iteration(self, iteration):
        """
        Stores the new iteration number in the database file.
        """
        B2DEBUG(29, f"Setting {self.name} to {iteration}.")
        with CAFDB(self._db_path) as db:
            db.update_calibration_value(self.name, "iteration", iteration)
        # Reads the value back from the DB to confirm what was stored.
        B2DEBUG(29, f"{self.name} set to {self.iteration}.")
1116
1117
1119 """
1120 Parameters:
1121 algorithm: The CalibrationAlgorithm instance that we want to execute.
1122 Keyword Arguments:
1123 data_input : An optional function that sets the input files of the algorithm.
1124 pre_algorithm : An optional function that runs just prior to execution of the algorithm.
1125 Useful for set up e.g. module initialisation
1126
1127 This is a simple wrapper class around the C++ CalibrationAlgorithm class.
1128 It helps to add functionality to algorithms for use by the Calibration and CAF classes rather
1129 than separating the logic into those classes directly.
1130
1131 This is **not** currently a class that a user should interact with much during `CAF`
1132 setup (unless you're doing something advanced).
1133 The `Calibration` class should be doing the most of the creation of the defaults for these objects.
1134
1135 Setting the `data_input` function might be necessary if you have set the `Calibration.output_patterns`.
1136 Also, setting the `pre_algorithm` to a function that should execute prior to each `strategies.AlgorithmStrategy`
1137 is often useful i.e. by calling for the Geometry module to initialise.
1138 """
1139
1140 def __init__(self, algorithm, data_input=None, pre_algorithm=None):
1141 """
1142 """
1143
1144 self.algorithm = algorithm
1145
1146 cppname = type(algorithm).__cpp_name__
1147
1148 self.name = cppname[cppname.rfind('::') + 2:]
1149
1152 self.data_input = data_input
1153 if not self.data_input:
1155
1158 self.pre_algorithm = pre_algorithm
1159
1162
1167 self.params = {}
1168
1169 def default_inputdata_setup(self, input_file_paths):
1170 """
1171 Simple setup to set the input file names to the algorithm. Applied to the data_input attribute
1172 by default. This simply takes all files returned from the `Calibration.output_patterns` and filters
1173 for only the CollectorOutput.root files. Then it sets them as input files to the CalibrationAlgorithm class.
1174 """
1175 collector_output_files = list(filter(lambda file_path: "CollectorOutput.root" == Path(file_path).name,
1176 input_file_paths))
1177 info_lines = [f"Input files used in {self.name}:"]
1178 info_lines.extend(collector_output_files)
1179 B2INFO_MULTILINE(info_lines)
1180 self.algorithm.setInputFileNames(collector_output_files)
1181
1182
1183class CAF():
1184 """
1185 Parameters:
1186 calibration_defaults (dict): A dictionary of default options for calibrations run by this `CAF` instance e.g.
1187
1188 >>> calibration_defaults={"max_iterations":2}
1189
1190 This class holds `Calibration` objects and processes them. It defines the initial configuration/setup
1191 for the calibrations. But most of the real processing is done through the `caf.state_machines.CalibrationMachine`.
1192
1193 The `CAF` class essentially does some initial setup, holds the `CalibrationBase` instances and calls the
1194 `CalibrationBase.start` when the dependencies are met.
1195
1196 Much of the checking for consistency is done in this class so that no processing is done with an invalid
1197 setup. Choosing which files to use as input should be done from outside during the setup of the `CAF` and
1198 `CalibrationBase` instances.
1199 """
1200
1201
    #: The name of the SQLite DB that gets created.
    _db_name = "caf_state.db"

    #: The class-level defaults applied to each Calibration (can be overridden per-instance).
    default_calibration_config = {
        "max_iterations": 5,
        "ignored_runs": []
    }
1208
    def __init__(self, calibration_defaults=None):
        """
        Initialise the CAF instance.

        Keyword Arguments:
            calibration_defaults (dict): Default options applied to each `Calibration`
                known to this CAF, merged over `default_calibration_config`.
        """

        # Output path to store results of calibration and bookkeeping information.
        self.output_dir = "calibration_results"

        # The ordering and explicit future dependencies of calibrations.
        self.order = None

        # Private backend attribute, accessed via the `backend` property.
        self._backend = None

        # The heartbeat (seconds) between polling for Calibrations that are finished.
        self.heartbeat = 5

        if not calibration_defaults:
            calibration_defaults = {}

        # Merge user-provided defaults over the class-level defaults.
        self.calibration_defaults = {**self.default_calibration_config, **calibration_defaults}

        # The path of the SQLite DB; set properly in `_make_database` during run().
        self._db_path = None
1236
1237 def add_calibration(self, calibration):
1238 """
1239 Adds a `Calibration` that is to be used in this program to the list.
1240 Also adds an empty dependency list to the overall dictionary.
1241 You should not directly alter a `Calibration` object after it has been
1242 added here.
1243 """
1244 if calibration.is_valid():
1245 if calibration.name not in self.calibrations:
1246 self.calibrations[calibration.name] = calibration
1247 else:
1248 B2WARNING(f"Tried to add a calibration with the name {calibration.name} twice.")
1249 else:
1250 B2WARNING(f"Tried to add incomplete/invalid calibration ({calibration.name}) to the framework."
1251 "It was not added and will not be part of the final process.")
1252
1254 """
1255 This checks the future and past dependencies of each `Calibration` in the `CAF`.
1256 If any dependencies are not known to the `CAF` then they are removed from the `Calibration`
1257 object directly.
1258 """
1259 calibration_names = [calibration.name for calibration in self.calibrations.values()]
1260
1261 def is_dependency_in_caf(dependency):
1262 """
1263 Quick function to use with filter() and check dependencies against calibrations known to `CAF`
1264 """
1265 dependency_in_caf = dependency.name in calibration_names
1266 if not dependency_in_caf:
1267 B2WARNING(f"The calibration {dependency.name} is a required dependency but is not in the CAF."
1268 " It has been removed as a dependency.")
1269 return dependency_in_caf
1270
1271 # Check that there aren't dependencies on calibrations not added to the framework
1272 # Remove them from the calibration objects if there are.
1273 for calibration in self.calibrations.values():
1274 filtered_future_dependencies = list(filter(is_dependency_in_caf, calibration.future_dependencies))
1275 calibration.future_dependencies = filtered_future_dependencies
1276
1277 filtered_dependencies = list(filter(is_dependency_in_caf, calibration.dependencies))
1278 calibration.dependencies = filtered_dependencies
1279
1281 """
1282 - Uses dependency attributes of calibrations to create a dependency dictionary and passes it
1283 to a sorting algorithm.
1284 - Returns valid OrderedDict if sort was successful, empty one if it failed (most likely a cyclic dependency)
1285 """
1286 # First remove any dependencies on calibrations not added to the CAF
1288 # Filling dependencies dictionaries of CAF for sorting, only explicit dependencies for now
1289 # Note that they currently use the names not the calibration objects.
1290 for calibration in self.calibrations.values():
1291 future_dependencies_names = [dependency.name for dependency in calibration.future_dependencies]
1292 past_dependencies_names = [dependency.name for dependency in calibration.dependencies]
1293
1294 self.future_dependencies[calibration.name] = future_dependencies_names
1295 self.dependencies[calibration.name] = past_dependencies_names
1296 # Gives us a list of A (not THE) valid ordering and checks for cyclic dependencies
1297 order = topological_sort(self.future_dependencies)
1298 if not order:
1299 return False
1300
1301 # Get an ordered dictionary of the sort order but including all implicit dependencies.
1302 ordered_full_dependencies = all_dependencies(self.future_dependencies, order)
1303
1304 # Return all the implicit+explicit past dependencies
1305 full_past_dependencies = past_from_future_dependencies(ordered_full_dependencies)
1306 # Correct each calibration's dependency list to reflect the implicit dependencies
1307 for calibration in self.calibrations.values():
1308 full_deps = full_past_dependencies[calibration.name]
1309 explicit_deps = [cal.name for cal in calibration.dependencies]
1310 for dep in full_deps:
1311 if dep not in explicit_deps:
1312 calibration.dependencies.append(self.calibrations[dep])
1313 # At this point the calibrations have their full dependencies but they aren't in topological
1314 # sort order. Correct that here
1315 ordered_dependency_list = []
1316 for ordered_calibration_name in order:
1317 if ordered_calibration_name in [dep.name for dep in calibration.dependencies]:
1318 ordered_dependency_list.append(self.calibrations[ordered_calibration_name])
1319 calibration.dependencies = ordered_dependency_list
1320 order = ordered_full_dependencies
1321 # We should also patch in all of the implicit dependencies for the calibrations
1322 return order
1323
1325 """
1326 Makes sure that the CAF has a valid backend setup. If one isn't set by the user (or if the
1327 one that is stored isn't a valid Backend object) we should create a default Local backend.
1328 """
1329 if not isinstance(self._backend, caf.backends.Backend):
1330
1331 self.backend = caf.backends.Local()
1332
1334 """
1335 Checks all current calibrations and removes any invalid Collections from their collections list.
1336 """
1337 B2INFO("Checking for any invalid Collections in Calibrations.")
1338 for calibration in self.calibrations.values():
1339 valid_collections = {}
1340 for name, collection in calibration.collections.items():
1341 if collection.is_valid():
1342 valid_collections[name] = collection
1343 else:
1344 B2WARNING(f"Removing invalid Collection '{name}' from Calibration '{calibration.name}'.")
1345 calibration.collections = valid_collections
1346
    def run(self, iov=None):
        """
        Keyword Arguments:
            iov(`caf.utils.IoV`): IoV to calibrate for this processing run. Only the input files necessary to calibrate
                this IoV will be used in the collection step.

        This function runs the overall calibration job, saves the outputs to the output_dir directory,
        and creates database payloads.

        Upload of final databases is not done here. This simply creates the local databases in
        the output directory. You should check the validity of your new local database before uploading
        to the conditions DB via the basf2 tools/interface to the DB.
        """
        if not self.calibrations:
            B2FATAL("There were no Calibration objects to run. Maybe you tried to add invalid ones?")
        # Checks whether the dependencies we've added will give a valid order
        order = self._order_calibrations()
        if not order:
            B2FATAL("Couldn't order the calibrations properly. Could be a cyclic dependency.")

        # Check that a backend has been set and use default Local() one if not
        self._check_backend()

        # Creates the overall output directory and reset the attribute to use an absolute path to it.
        self.output_dir = self._make_output_dir()

        # Creates a SQLite DB to save the status of the various calibrations
        self._make_database()

        # Enter the overall output dir during processing and open a connection to the DB
        with temporary_workdir(self.output_dir):
            db = CAFDB(self._db_path)
            db.open()
            db_initial_calibrations = db.query("select * from calibrations").fetchall()
            for calibration in self.calibrations.values():
                # Apply defaults given to the `CAF` to the calibrations if they aren't set
                calibration._apply_calibration_defaults(self.calibration_defaults)
                calibration._db_path = self._db_path
                calibration.output_database_dir = Path(self.output_dir, calibration.name, "outputdb").as_posix()
                calibration.iov = iov
                if not calibration.backend:
                    calibration.backend = self.backend
                # Do some checking of the db to see if we need to add an entry for this calibration
                if calibration.name not in [db_cal[0] for db_cal in db_initial_calibrations]:
                    db.insert_calibration(calibration.name)
                    db.commit()
                else:
                    # Restore the checkpoint state/iteration from the previous run.
                    for cal_info in db_initial_calibrations:
                        if cal_info[0] == calibration.name:
                            cal_initial_state = cal_info[2]
                            cal_initial_iteration = cal_info[3]
                    B2INFO(f"Previous entry in database found for {calibration.name}.")
                    B2INFO(f"Setting {calibration.name} state to checkpoint state '{cal_initial_state}'.")
                    calibration.state = cal_initial_state
                    B2INFO(f"Setting {calibration.name} iteration to '{cal_initial_iteration}'.")
                    calibration.iteration = cal_initial_iteration
                # Daemonize so that it exits if the main program exits
                calibration.daemon = True

            db.close()

            # Is it possible to keep going?
            keep_running = True
            while keep_running:
                keep_running = False
                # Do we have calibrations that may yet complete?
                remaining_calibrations = []

                for calibration in self.calibrations.values():
                    # Find the currently ended calibrations (may not be joined yet)
                    if (calibration.state == CalibrationBase.end_state or calibration.state == CalibrationBase.fail_state):
                        # Search for any alive Calibrations and join them
                        if calibration.is_alive():
                            B2DEBUG(29, f"Joining {calibration.name}.")
                            calibration.join()
                    else:
                        if calibration.dependencies_met():
                            if not calibration.is_alive():
                                B2DEBUG(29, f"Starting {calibration.name}.")
                                try:
                                    calibration.start()
                                except RuntimeError:
                                    # Catch the case when the calibration just finished so it ended up here
                                    # in the "else" and not above where it should have been joined.
                                    B2DEBUG(29, f"{calibration.name} probably just finished, join it later.")
                            remaining_calibrations.append(calibration)
                        else:
                            if not calibration.failed_dependencies():
                                remaining_calibrations.append(calibration)
                if remaining_calibrations:
                    keep_running = True
                    # Loop over jobs that the calibrations want submitted and submit them.
                    # We do this here because some backends don't like us submitting in parallel
                    # from multiple CalibrationThreads.
                    # So this is like a mini job queue without getting too clever with it
                    for calibration in remaining_calibrations:
                        for job in calibration.jobs_to_submit[:]:
                            calibration.backend.submit(job)
                            calibration.jobs_to_submit.remove(job)
                sleep(self.heartbeat)

        B2INFO("Printing summary of final CAF status.")
        with CAFDB(self._db_path, read_only=True) as db:
            print(db.output_calibration_table())
1452
    @property
    def backend(self):
        """
        The `backend <backends.Backend>` that runs the collector job.
        When set, this is checked that a `backends.Backend` class instance was passed in.
        """
        return self._backend
1460
1461 @backend.setter
1462 def backend(self, backend):
1463 """
1464 """
1465 if isinstance(backend, caf.backends.Backend):
1466 self._backend = backend
1467 else:
1468 B2ERROR('Backend property must inherit from Backend class.')
1469
1471 """
1472 Creates the output directory. If it already exists we are now going to try and restart the program from the last state.
1473
1474 Returns:
1475 str: The absolute path of the new output_dir
1476 """
1477 p = Path(self.output_dir).resolve()
1478 if p.is_dir():
1479 B2INFO(f"{p.as_posix()} output directory already exists. "
1480 "We will try to restart from the previous finishing state.")
1481 return p.as_posix()
1482 else:
1483 p.mkdir(parents=True)
1484 if p.is_dir():
1485 return p.as_posix()
1486 else:
1487 raise FileNotFoundError(f"Attempted to create output_dir {p.as_posix()}, but it didn't work.")
1488
1490 """
1491 Creates the CAF status database. If it already exists we don't overwrite it.
1492 """
1493 self._db_path = Path(self.output_dir, self._db_name).absolute()
1494 if self._db_path.exists():
1495 B2INFO(f"Previous CAF database found {self._db_path}")
1496 # Will create a new database + tables, or do nothing but checks we can connect to existing one
1497 with CAFDB(self._db_path):
1498 pass
pre_algorithm
Function called after data_input but before algorithm.execute to do any remaining setup.
__init__(self, algorithm, data_input=None, pre_algorithm=None)
data_input
Function called before the pre_algorithm method to setup the input data that the CalibrationAlgorithm...
dict params
Parameters that could be used in the execution of the algorithm strategy/runner to modify behaviour.
algorithm
CalibrationAlgorithm instance (assumed to be true since the Calibration class checks)
strategy
The algorithm stratgey that will be used when running over the collected data.
default_inputdata_setup(self, input_file_paths)
dict dependencies
Dictionary of dependencies of Calibration objects, where value is the list of Calibration objects tha...
add_calibration(self, calibration)
_remove_missing_dependencies(self)
_order_calibrations(self)
int heartbeat
The heartbeat (seconds) between polling for Calibrations that are finished.
dict default_calibration_config
The defaults for Calibrations.
_make_database(self)
dict calibrations
Dictionary of calibrations for this CAF instance.
dict calibration_defaults
Default options applied to each calibration known to the CAF, if the Calibration has these defined by...
_backend
Private backend attribute.
dict future_dependencies
Dictionary of future dependencies of Calibration objects, where the value is all calibrations that wi...
_check_backend(self)
_db_path
The path of the SQLite DB.
order
The ordering and explicit future dependencies of calibrations.
__init__(self, calibration_defaults=None)
run(self, iov=None)
str _db_name
The name of the SQLite DB that gets created.
backend
backend property
_prune_invalid_collections(self)
_make_output_dir(self)
str output_dir
Output path to store results of calibration and bookkeeping information.
list input_files
Files used for collection procedure.
Definition framework.py:359
list dependencies
List of calibration objects, where each one is a dependency of this one.
Definition framework.py:349
dict files_to_iovs
File -> Iov dictionary, should be :
Definition framework.py:356
__init__(self, name, input_files=None)
Definition framework.py:340
_apply_calibration_defaults(self, defaults)
Definition framework.py:431
list future_dependencies
List of calibration objects that depend on this one.
Definition framework.py:347
str end_state
The name of the successful completion state.
Definition framework.py:335
str fail_state
The name of the failure state.
Definition framework.py:338
bool save_payloads
Marks this Calibration as one which has payloads that should be copied and uploaded.
Definition framework.py:369
list jobs_to_submit
A simple list of jobs that this Calibration wants submitted at some point.
Definition framework.py:371
str output_database_dir
The directory where we'll store the local database payloads from this calibration.
Definition framework.py:366
name
Name of calibration object.
Definition framework.py:345
depends_on(self, calibration)
Definition framework.py:387
iov
IoV which will be calibrated.
Definition framework.py:364
ignored_runs
List of ExpRun that will be ignored by this Calibration.
Definition framework.py:582
use_central_database(self, global_tag, apply_to_default_collection=True)
Definition framework.py:685
list database_chain
The database chain that is applied to the algorithms.
Definition framework.py:591
strategies
The strategy that the algorithm(s) will be run against.
Definition framework.py:587
dict results
Output results of algorithms for each iteration.
Definition framework.py:574
int heartbeat
This calibration's sleep time before rechecking to see if it can move state.
Definition framework.py:610
int collector_full_update_interval
While checking if the collector is finished we don't bother wastefully checking every subjob's status...
Definition framework.py:608
machine
The caf.state_machines.CalibrationMachine that we will run to process this calibration start to finis...
Definition framework.py:612
list checkpoint_states
Checkpoint states which we are allowed to restart from.
Definition framework.py:531
_db_path
Location of a SQLite database that will save the state of the calibration so that it can be restarted...
Definition framework.py:614
max_iterations
Variable to define the maximum number of iterations for this calibration specifically.
Definition framework.py:577
use_local_database(self, filename, directory="", apply_to_default_collection=True)
Definition framework.py:724
_get_default_collection_attribute(self, attr)
Definition framework.py:771
list _algorithms
Internal calibration algorithms stored for this calibration.
Definition framework.py:552
reset_database(self, apply_to_default_collection=True)
Definition framework.py:671
add_collection(self, name, collection)
Definition framework.py:616
algorithms
Algorithm classes that will be run by this Calibration.
Definition framework.py:572
backend
The backend <backends.Backend> we'll use for our collector submission in this calibration.
Definition framework.py:603
algorithms_runner
The class that runs all the algorithms in this Calibration using their assigned :py:class:caf....
Definition framework.py:600
max_files_per_collector_job(self)
Definition framework.py:864
dict collections
Collections stored for this calibration.
Definition framework.py:550
__init__(self, name, collector=None, algorithms=None, input_files=None, pre_collector_path=None, database_chain=None, output_patterns=None, max_files_per_collector_job=None, max_collector_jobs=None, backend_args=None)
Definition framework.py:546
str default_collection_name
Default collection name.
Definition framework.py:533
_set_default_collection_attribute(self, attr, value)
Definition framework.py:783
list job_cmd
The Collector caf.backends.Job.cmd attribute.
Definition framework.py:153
int default_max_collector_jobs
The default maximum number of collector jobs to create.
Definition framework.py:69
list input_files
Internal input_files stored for this calibration.
Definition framework.py:89
dict backend_args
Dictionary passed to the collector Job object to configure how the caf.backends.Backend instance shou...
Definition framework.py:130
_collector
Internal storage of collector attribute.
Definition framework.py:270
dict files_to_iovs
File -> Iov dictionary, should be :
Definition framework.py:98
list database_chain
The database chain used for this Collection.
Definition framework.py:138
__init__(self, collector=None, input_files=None, pre_collector_path=None, database_chain=None, output_patterns=None, max_files_per_collector_job=None, max_collector_jobs=None, backend_args=None)
Definition framework.py:83
list output_patterns
Output patterns of files produced by collector which will be used to pass to the Algorithm....
Definition framework.py:110
splitter
The SubjobSplitter to use when constructing collector subjobs from the overall Job object.
Definition framework.py:116
_input_files
set input files
Definition framework.py:240
pre_collector_path
Since many collectors require some different setup, if you set this attribute to a basf2....
Definition framework.py:103
max_files_per_collector_job(self)
Definition framework.py:299
collector
Collector module of this collection.
Definition framework.py:87
uri_list_from_input_file(input_file)
Definition framework.py:208
use_local_database(self, filename, directory="")
Definition framework.py:187
use_central_database(self, global_tag)
Definition framework.py:162
STL class.