"""
The core module of the Belle II Analysis Software Framework.
"""
import sys as _sys
import signal as _signal
if _sys.version_info[0] < 3:
    print("basf2 requires python3. Please run the steering files using basf2 "
          "(or python3), not python")
    _sys.exit(1)
from basf2 import _override_print

# pybasf2 provides the C++ framework bindings (Module, Path, logging, ...)
import pybasf2
from basf2 import _constwrapper
basf2label = 'BASF2 (Belle Analysis Software Framework 2)'
basf2copyright = 'Copyright(C) 2010-2018 Belle II Collaboration'

# restore the default SIGINT handler so that Ctrl+C terminates the process
_signal.signal(_signal.SIGINT, _signal.SIG_DFL)

def register_module(name_or_module, shared_lib_path=None, logLevel=None, debugLevel=None, **kwargs):
    """
    Register the module 'name' and return it (e.g. for adding to a path). This
    function is intended to instantiate existing modules. To find out which
    modules exist you can run :program:`basf2 -m` and to get details about the
    parameters for each module you can use :program:`basf2 -m {modulename}`

    Parameters can be passed directly to the module as keyword parameters or can
    be set later using `Module.param`

    >>> module = basf2.register_module('EventInfoSetter', evtNumList=100, logLevel=LogLevel.ERROR)
    >>> module.param("evtNumList", 100)

    Parameters:
      name_or_module: The name of the module type, may also be an existing
          `Module` instance for which parameters should be set
      shared_lib_path (str): An optional path to a shared library from which the
          module should be loaded
      logLevel (LogLevel): indicates the minimum severity of log messages
          to be shown from this module. See `Module.set_log_level`
      debugLevel (int): Number indicating the detail of debug messages, the
          default level is 100. See `Module.set_debug_level`
      kwargs: Additional parameters to be passed to the module.

    Note:
      You can also use `Path.add_module()` directly,
      which accepts the same name, logging and module parameter arguments. There
      is no need to register the module by hand if you will add it to the path in
      any case.
    """
    if isinstance(name_or_module, pybasf2.Module):
        # it is already a module instance, just set the parameters on it
        module = name_or_module
    else:
        module_name = name_or_module
        if shared_lib_path is not None:
            module = pybasf2._register_module(module_name, shared_lib_path)
        else:
            module = pybasf2._register_module(module_name)

    if kwargs:
        module.param(kwargs)
    if logLevel is not None:
        module.set_log_level(logLevel)
    if debugLevel is not None:
        module.set_debug_level(debugLevel)
    return module
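
# Illustrative sketch of the two equivalent ways to configure a module (the
# 'EventInfoSetter' module and its 'evtNumList' parameter are taken from the
# docstring above; the event count is a placeholder):
#
#   eventinfo = register_module('EventInfoSetter')
#   eventinfo.param('evtNumList', 100)
#   main = create_path()
#   main.add_module(eventinfo)
#   # ... or, without registering by hand:
#   main.add_module('EventInfoSetter', evtNumList=100)
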
def set_module_parameters(path, name=None, type=None, recursive=False, **kwargs):
    """Set the given set of parameters for all `modules <Module>` in a path which
    have the given ``name`` (see `Module.set_name`)

    Usage is similar to `register_module()` but this function will not create
    new modules but just adjust parameters for modules already in a `Path`

    >>> set_module_parameters(path, "Geometry", components=["PXD"], logLevel=LogLevel.WARNING)

    Parameters:
      path (basf2.Path): The path to search for the modules
      name (str): The name of the module to set parameters for
      type (str): The type of the module to set parameters for.
      recursive (bool): if True also look in paths connected by conditions or `Path.for_each()`
      kwargs: Named parameters to be set for the module, see `register_module()`
    """
    if name is None and type is None:
        raise ValueError("At least one of name or type has to be given")

    if not kwargs:
        raise ValueError("no module parameters given")
    found = False
    for module in path.modules():
        if (name is None or module.name() == name) and (type is None or module.type() == type):
            # use register_module() so that logLevel and debugLevel keywords
            # are handled in the same way as for newly created modules
            register_module(module, **kwargs)
            found = True

        if recursive:
            if module.has_condition():
                for condition_path in module.get_all_condition_paths():
                    set_module_parameters(condition_path, name, type, recursive, **kwargs)
            if module.type() == "SubEvent":
                for subpath in [p.values for p in module.available_params() if p.name == "path"]:
                    set_module_parameters(subpath, name, type, recursive, **kwargs)

    if not found:
        raise KeyError("No module with given name found anywhere in the path")
def create_path():
    """
    Creates a new path and returns it. You can also instantiate `basf2.Path` directly.
    """
    return pybasf2.Path()

def process(path, max_event=0):
    """
    Start processing events using the modules in the given `basf2.Path` object.

    Can be called multiple times in one steering file (some restrictions apply:
    modules need to perform proper cleanup & reinitialisation, if Geometry is
    involved this might be difficult to achieve.)

    When used in a Jupyter notebook this function will automatically print a
    nice progress bar and display the log messages in an advanced way once the
    processing is complete.

    Note:
      This also means that in a Jupyter notebook, modifications to class members
      or global variables will not be visible after processing is complete as
      the processing is performed in a subprocess.

      To restore the old behavior you can use ``basf2.core.process()`` which
      behaves exactly the same in Jupyter notebooks as it does in normal
      Python scripts::

          from basf2 import core
          core.process(path)

    Parameters:
      path: The path with which the processing starts
      max_event: The maximal number of events which will be processed,
          0 means no limit

    .. versionchanged:: release-03-00-00
       automatic Jupyter integration
    """
    # if we are running inside IPython/Jupyter, store the input history as the
    # steering file contents so it can be kept with the output metadata
    try:
        ipython = get_ipython()  # only defined when running inside IPython
        history = "\n".join(e[2] for e in ipython.history_manager.get_range())
        from ROOT import Belle2
        Belle2.Environment.Instance().setSteering(history)
    except NameError:
        pass

    # if a pickle path is configured, run the special pickle path handling first
    if pybasf2.get_pickle_path() != "":
        from .pickle_path import check_pickle_path
        path = check_pickle_path(path)

    pybasf2.B2INFO("Starting event processing, random seed is set to '" + pybasf2.get_random_seed() + "'")

    if max_event > 0:
        pybasf2._process(path, max_event)
    else:
        pybasf2._process(path)
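
# Minimal steering-file sketch using the helpers above ('EventInfoSetter' and
# 'Progress' are standard framework modules; the event count is a placeholder):
#
#   main = create_path()
#   main.add_module('EventInfoSetter', evtNumList=100)
#   main.add_module('Progress')
#   process(main)
#   # or stop after the first 10 events:
#   process(main, max_event=10)
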
def set_log_level(level):
    """
    Sets the global log level which specifies up to which level the
    logging messages will be shown

    Parameters:
      level (basf2.LogLevel): minimum severity of messages to be logged
    """
    logging.log_level = level

def set_debug_level(level):
    """
    Sets the global debug level which specifies up to which level the
    debug messages should be shown

    Parameters:
      level (int): The debug level. The default value is 100
    """
    logging.debug_level = level

def log_to_console(color=False):
    """
    Adds the standard output stream to the list of logging destinations.
    The shell logging destination is added to the list by the framework by default.
    """
    logging.add_console(color)

def log_to_file(filename, append=False):
    """
    Adds a text file to the list of logging destinations.

    Parameters:
      filename: The path and filename of the text file
      append: Should the logging system append the messages to the end of the
          file (True) or create a new file for each event processing session (False).
    """
    logging.add_file(filename, append)
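
# Logging configuration sketch (the file name is a placeholder):
#
#   set_log_level(LogLevel.INFO)
#   set_debug_level(50)           # only relevant for messages emitted at DEBUG level
#   log_to_file('processing.log', append=False)
#   log_to_console(color=True)
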
def reset_log():
    """
    Resets the logging by removing all logging destinations
    """
    logging.reset()

def _add_module(self, module, logLevel=None, debugLevel=None, **kwargs):
    """
    Add given module (either object or name) at the end of this path.
    All unknown arguments are passed as module parameters.

    >>> path = create_path()
    >>> path.add_module('EventInfoSetter', evtNumList=100, logLevel=LogLevel.ERROR)
    <pybasf2.Module at 0x1e356e0>

    >>> path = create_path()
    >>> eventinfosetter = register_module('EventInfoSetter')
    >>> path.add_module(eventinfosetter)
    <pybasf2.Module at 0x2289de8>
    """
    module = register_module(module, logLevel=logLevel, debugLevel=debugLevel, **kwargs)
    self._add_module_object(module)
    return module

def _add_independent_path(self, skim_path, ds_ID='', merge_back_event=[]):
    """
    Add given path at the end of this path and ensure all modules there
    do not influence the main DataStore. You can thus use modules in
    skim_path to clean up e.g. the list of particles, save a skimmed uDST file,
    and continue working with the unmodified DataStore contents outside of
    ``skim_path``.

    Parameters:
      ds_ID: can be specified to give a defined ID to the temporary DataStore,
          otherwise, a random name will be generated.
      merge_back_event: is a list of object/array names (of event durability)
          that will be merged back into the main path.
    """
    self._add_independent_path(skim_path, ds_ID, merge_back_event)

# attach the convenience wrappers defined above to pybasf2.Path
pybasf2.Path.add_module = _add_module
pybasf2.Path.add_independent_path = _add_independent_path
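
# Sketch of adding an independent skim path through the wrapper attached above
# ('main' is assumed to be the main analysis path; the modules added to the skim
# path and the merged array name are placeholders):
#
#   skim = create_path()
#   # ... add modules to 'skim' that select particles and write the skimmed output ...
#   main.add_independent_path(skim, ds_ID='skim', merge_back_event=['MySkimResults'])
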
def get_default_global_tags():
    """
    Return the default globaltags as a single comma-separated string

    .. deprecated:: release-04-00-00
       Please use `basf2.conditions.default_globaltags <ConditionsConfiguration.default_globaltags>` instead
    """
    B2WARNING("basf2.get_default_global_tags() is deprecated, please use basf2.conditions.default_globaltags")
    return ",".join(conditions.default_globaltags)

def set_central_database_networkparams(**argk):
    """
    Set some expert database connection details

    .. deprecated:: release-04-00-00
       Please use `basf2.conditions.expert_settings <ConditionsConfiguration.expert_settings>` instead
    """
    B2WARNING("basf2.set_central_database_networkparams() is deprecated, please use basf2.conditions.expert_settings()")
    return conditions.expert_settings(**argk)

def set_central_serverlist(serverlist):
    """
    Set the list of database servers

    .. deprecated:: release-04-00-00
       Please use `basf2.conditions.metadata_providers <ConditionsConfiguration.metadata_providers>` instead
    """
    B2WARNING("basf2.set_central_serverlist() is deprecated, please use basf2.conditions.metadata_providers instead")
    conditions.metadata_providers = serverlist + [e for e in conditions.metadata_providers if not e.startswith("http")]