"""
The core module of the Belle II Analysis Software Framework.
"""

import sys as _sys
import signal as _signal

# basf2 steering files must be executed with python3; bail out early with a
# clear message when run under python2.
if _sys.version_info[0] < 3:
    print("basf2 requires python3. Please run the steering files using basf2 "
          "(or python3), not python")
    _sys.exit(1)

# imported only for its side effect — presumably replaces the builtin print
# (TODO confirm in basf2._override_print)
from basf2 import _override_print  # noqa

# the C++ part of the framework; `logging` is the framework logging object
# exported from C++, not the python stdlib module
import pybasf2
from pybasf2 import logging

# imported only for its side effect (const-wrapping of framework objects)
from basf2 import _constwrapper  # noqa

#: name/label of the framework
basf2label = 'basf2 (Belle II Analysis Software Framework)'
#: copyright notice
basf2copyright = 'Copyright(C) 2010-2024 Members of the Belle II Collaboration'
#: license hint shown by the command line tools
basf2license = '(See "basf2 --license" for more information.)'

# restore the default Ctrl-C behaviour: let SIGINT terminate the process
# instead of raising KeyboardInterrupt inside the event loop
_signal.signal(_signal.SIGINT, _signal.SIG_DFL)
   54def register_module(name_or_module, shared_lib_path=None, logLevel=None, debugLevel=None, **kwargs):
 
   56    Register the module 'name' and return it (e.g. for adding to a path). This 
   57    function is intended to instantiate existing modules. To find out which 
   58    modules exist you can run :program:`basf2 -m` and to get details about the 
   59    parameters for each module you can use :program:`basf2 -m {modulename}` 
   61    Parameters can be passed directly to the module as keyword parameters or can 
   62    be set later using `Module.param` 
   64    >>> module = basf2.register_module('EventInfoSetter', evtNumList=100, logLevel=LogLevel.ERROR) 
   65    >>> module.param("evtNumList", 100) 
   68      name_or_module: The name of the module type, may also be an existing 
   69           `Module` instance for which parameters should be set 
   70      shared_lib_path (str): An optional path to a shared library from which the 
   71           module should be loaded 
   72      logLevel (LogLevel): indicates the minimum severity of log messages 
   73           to be shown from this module. See `Module.set_log_level` 
   74      debugLevel (int): Number indicating the detail of debug messages, the 
   75           default level is 100. See `Module.set_debug_level` 
   76      kwargs: Additional parameters to be passed to the module. 
   79        You can also use `Path.add_module()` directly, 
   80        which accepts the same name, logging and module parameter arguments. There 
   81        is no need to register the module by hand if you will add it to the path in 
   85    if isinstance(name_or_module, pybasf2.Module):
 
   86        module = name_or_module
 
   88        module_name = name_or_module
 
   89        if shared_lib_path 
is not None:
 
   90            module = pybasf2._register_module(module_name, shared_lib_path)
 
   92            module = pybasf2._register_module(module_name)
 
   96    if logLevel 
is not None:
 
   97        module.set_log_level(logLevel)
 
   98    if debugLevel 
is not None:
 
   99        module.set_debug_level(debugLevel)
 
  104def set_module_parameters(path, name=None, type=None, recursive=False, **kwargs):
 
  105    """Set the given set of parameters for all `modules <Module>` in a path which 
  106    have the given ``name`` (see `Module.set_name`) 
  108    Usage is similar to `register_module()` but this function will not create 
  109    new modules but just adjust parameters for modules already in a `Path` 
  111    >>> set_module_parameters(path, "Geometry", components=["PXD"], logLevel=LogLevel.WARNING) 
  114      path (basf2.Path): The path to search for the modules 
  115      name (str): Then name of the module to set parameters for 
  116      type (str): The type of the module to set parameters for. 
  117      recursive (bool): if True also look in paths connected by conditions or `Path.for_each()` 
  118      kwargs: Named parameters to be set for the module, see  `register_module()` 
  121    if name 
is None and type 
is None:
 
  122        raise ValueError(
"At least one of name or type has to be given")
 
  125        raise ValueError(
"no module parameters given")
 
  128    for module 
in path.modules():
 
  129        if (name 
is None or module.name() == name) 
and (type 
is None or module.type() == type):
 
  132            register_module(module, **kwargs)
 
  136            if module.has_condition():
 
  137                for condition_path 
in module.get_all_condition_paths():
 
  138                    set_module_parameters(condition_path, name, type, recursive, **kwargs)
 
  139            if module.type() == 
"SubEvent":
 
  140                for subpath 
in [p.values 
for p 
in module.available_params() 
if p.name == 
"path"]:
 
  141                    set_module_parameters(subpath, name, type, recursive, **kwargs)
 
  144        raise KeyError(
"No module with given name found anywhere in the path")
 
  147def remove_module(old_path, name=None):
 
  148    """Provides a new path with all modules that were in the ``old_path`` \ 
  149    except the one with the given ``name`` (see `Module.set_name`) 
  151    Usage is very simple, in this example we remove Geometry the path: 
  153    >>> main = remove_module(main, "Geometry") 
  156      old_path (basf2.Path): The path to search for the module 
  157      name (str): Then name of the module you want to remove 
  161        raise ValueError(
"You should provide the module name")
 
  163    new_path = create_path()
 
  165    for module 
in old_path.modules():
 
  166        if name != module.name():
 
  167            new_path.add_module(module)
 
  174    Creates a new path and returns it. You can also instantiate `basf2.Path` directly. 
  176    return pybasf2.Path()
 
  179def process(path, max_event=0):
 
  181    Start processing events using the modules in the given `basf2.Path` object. 
  183    Can be called multiple times in one steering file (some restrictions apply: 
  184    modules need to perform proper cleanup & reinitialisation, if Geometry is 
  185    involved this might be difficult to achieve.) 
  187    When used in a Jupyter notebook this function will automatically print a 
  188    nice progress bar and display the log messages in an advanced way once the 
  189    processing is complete. 
  192     This also means that in a Jupyter Notebook, modifications to class members 
  193     or global variables will not be visible after processing is complete as 
  194     the processing is performed in a subprocess. 
  196     To restore the old behavior you can use ``basf2.core.process()`` which 
  197     will behave exactly identical in Jupyter notebooks as it does in normal 
  200           from basf2 import core 
  205      path: The path with which the processing starts 
  206      max_event:  The maximal number of events which will be processed, 
  209    .. versionchanged:: release-03-00-00 
  210       automatic Jupyter integration 
  216        ipython = get_ipython()  
 
  217        history = 
"\n".join(e[2] 
for e 
in ipython.history_manager.get_range())
 
  218        from ROOT 
import Belle2
 
  224    if pybasf2.get_pickle_path() != 
"":
 
  226        path = check_pickle_path(path)
 
  232    pybasf2.B2INFO(
"Starting event processing, random seed is set to '" + pybasf2.get_random_seed() + 
"'")
 
  235        pybasf2._process(path, max_event)
 
  237        pybasf2._process(path)
 
  240def set_log_level(level):
 
  242    Sets the global log level which specifies up to which level the 
  243    logging messages will be shown 
  246      level (basf2.LogLevel): minimum severity of messages to be logged 
  249    logging.log_level = level
 
  252def set_debug_level(level):
 
  254    Sets the global debug level which specifies up to which level the 
  255    debug messages should be shown 
  258      level (int): The debug level. The default value is 100 
  261    logging.debug_level = level
 
  264def log_to_console(color=False):
 
  266    Adds the standard output stream to the list of logging destinations. 
  267    The shell logging destination is 
  268    added to the list by the framework by default. 
  271    logging.add_console(color)
 
  274def log_to_file(filename, append=False):
 
  276    Adds a text file to the list of logging destinations. 
  279      filename: The path and filename of the text file 
  280      append: Should the logging system append the messages to the end of the 
  281         file (True) or create a new file for each event processing session (False). 
  285    logging.add_file(filename, append)
 
  290    Resets the logging by removing all logging destinations 
  296def _add_module(self, module, logLevel=None, debugLevel=None, **kwargs):
 
  298    Add given module (either object or name) at the end of this path. 
  299    All unknown arguments are passed as module parameters. 
  301    >>> path = create_path() 
  302    >>> path.add_module('EventInfoSetter', evtNumList=100, logLevel=LogLevel.ERROR) 
  303    <pybasf2.Module at 0x1e356e0> 
  305    >>> path = create_path() 
  306    >>> eventinfosetter = register_module('EventInfoSetter') 
  307    >>> path.add_module(eventinfosetter) 
  308    <pybasf2.Module at 0x2289de8> 
  310    module = register_module(module, logLevel=logLevel, debugLevel=debugLevel, **kwargs)
 
  311    self._add_module_object(module)
 
  315def _add_independent_path(self, skim_path, ds_ID='', merge_back_event=None):
 
  317    Add given path at the end of this path and ensure all modules there 
  318    do not influence the main DataStore. You can thus use modules in 
  319    skim_path to clean up e.g. the list of particles, save a skimmed uDST file, 
  320    and continue working with the unmodified DataStore contents outside of 
  324      ds_ID: can be specified to give a defined ID to the temporary DataStore, 
  325        otherwise, a random name will be generated. 
  326      merge_back_event: is a list of object/array names (of event durability) 
  327        that will be merged back into the main path. 
  329    if merge_back_event 
is None:
 
  330        merge_back_event = []
 
  331    self._add_independent_path(skim_path, ds_ID, merge_back_event)
 
  334def _add_independent_merge_path(
 
  338        merge_back_event=None,
 
  339        consistency_check=None,
 
  341        merge_same_file=False):
 
  343    Merge specified content of DataStore of independent path into DataStore of main path 
  344    on a per event level (add tracks/cluster from both events,...). 
  347      skim_path: independent path to be merged 
  348      ds_ID: can be specified to give a defined ID to the temporary DataStore, 
  349        otherwise, a random name will be generated (option for developers). 
  350      merge_back_event: is a list of object/array names (of event durability) 
  351        that will be merged back into the main path. 
  352      consistency_check: perform additional consistency checks on the objects from two paths. 
  353        If they are not satisfied, the skim_path proceeds to the next event on the path. 
  354        Currently supported value is "charge" that uses EventExtraInfo "charge" of the two paths, 
  355        that must be specified by the user, ensuring correct configuration of the combined event. 
  356        See CheckMergingConsistencyModule for more details. 
  357      event_mixing: apply event mixing (merge each event from first path with each event of second path) 
  358      merge_same_file: merge events from single file (useful for mixing) 
  360    if merge_back_event 
is None:
 
  361        merge_back_event = []
 
  362    if consistency_check 
is None:
 
  363        consistency_check = 
"" 
  366            pybasf2.B2INFO(
"add_independent_merge_path: merge_same_file requires event_mixing, setting it to True")
 
  368    for module 
in skim_path.modules():
 
  369        if module.type() == 
"RootInput":
 
  370            module.param(
"isSecondaryInput", 
True)
 
  371    self._add_independent_merge_path(skim_path, ds_ID, merge_back_event, consistency_check, event_mixing, merge_same_file)
 
  374pybasf2.Path.add_module = _add_module
 
  375pybasf2.Path.add_independent_path = _add_independent_path
 
  376pybasf2.Path.add_independent_merge_path = _add_independent_merge_path
 
# NOTE(review): the following two lines are stray C++ doxygen text
# (Belle2::Environment::Instance) pulled in by the extraction, kept here as
# comments so the module remains valid Python:
# static Environment & Instance()
# Static method to get a reference to the Environment instance.