Belle II Software  release-05-01-25
path_utils.py
1 import basf2
2 from softwaretrigger import constants
3 import modularAnalysis
4 import stdV0s
5 import vertex
6 from geometry import check_components
7 import reconstruction
8 
9 
def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add the DQM modules matching the given run type and environment to the path.

    :param path: the basf2 path to extend
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic)
    :param dqm_environment: location name steering which plots are created
    :param components: detector components to include in the DQM
    :param dqm_mode: which subset of events the DQM modules look at
    :param create_hlt_unit_histograms: forwarded to the collision DQM setup
    """
    # Imported locally, as the dqm package is not checked out by default.
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    if run_type == constants.RunTypes.cosmic:
        add_cosmic_dqm(path, components=components, dqm_environment=dqm_environment,
                       dqm_mode=dqm_mode)
    elif run_type == constants.RunTypes.beam:
        add_collision_dqm(path, components=components, dqm_environment=dqm_environment,
                          dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    else:
        basf2.B2FATAL("Run type {} not supported.".format(run_type))

    # Monitor the processing delay for the unfiltered DQM modes.
    if dqm_mode in ("dont_care", "all_events"):
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')
30 
31 
def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Append the HLT flavour of the online DQM modules to the path.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.hlt.name,
        components=components,
        dqm_mode=dqm_mode.name,
        create_hlt_unit_histograms=create_hlt_unit_histograms)
    # Account the DQM part of the path in its own statistics bucket.
    summary = path.add_module('StatisticsSummary')
    summary.set_name('Sum_HLT_DQM_' + dqm_mode.name)
44 
45 
def add_expressreco_dqm(path, run_type, components):
    """
    Append the ExpressReco flavour of the online DQM modules to the path.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.expressreco.name,
        components=components,
        dqm_mode=constants.DQMModes.dont_care.name)
52 
53 
def add_geometry_if_not_present(path):
    """
    Add the gearbox and geometry modules to the path, unless a module of
    the same name is already registered there.
    """
    # (module name, parameters) pairs, in the order they must be added.
    required_modules = [("Gearbox", {}), ("Geometry", {"useDB": True})]
    for module_name, parameters in required_modules:
        if module_name not in path:
            path.add_module(module_name, **parameters)
63 
64 
def add_store_only_metadata_path(path):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    things that are really needed, e.g. the event meta data and the results of the software trigger module.

    After this path was processed, you can not use the data store content any more to do reconstruction (because
    it is more or less empty), but can only output it to a (S)ROOT file.
    """
    pruner = path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS)
    pruner.set_name("KeepMetaData")
74 
75 
def add_store_only_rawdata_path(path, additonal_store_arrays_to_keep=None):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    raw objects from the detector and things that are really needed, e.g. the event meta data and the results of the
    software trigger module.

    After this path was processed, you can not use the data store content any more to do reconstruction (because
    it is more or less empty), but can only output it to a (S)ROOT file.

    NOTE(review): the parameter name keeps the historical misspelling
    ("additonal") for backward compatibility with keyword callers.
    """
    keep_entries = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if additonal_store_arrays_to_keep:
        keep_entries += additonal_store_arrays_to_keep

    pruner = path.add_module("PruneDataStore", matchEntries=keep_entries)
    pruner.set_name("KeepRawData")
91 
92 
def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Add the SoftwareTrigger for the filter cuts to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using randomly generated numbers,
        otherwise are applied using an internal counter.
    :return: the software trigger module
    """
    trigger_module = path.add_module(
        "SoftwareTrigger",
        baseIdentifier="filter",
        preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
        useRandomNumbersForPreScale=use_random_numbers_for_prescale)
    return trigger_module
115 
116 
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Add the SoftwareTrigger for the skimming (after the filtering) to the given path,
    together with the particle-list building it needs as input.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    """
    # ECL cluster and track particle lists used as input for the skim cuts.
    modularAnalysis.fillParticleList("pi+:skim", 'pt>0.2 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:hadb", 'p>0.1 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:tau", 'abs(d0) < 2 and abs(z0) < 8', path=path)
    modularAnalysis.fillParticleList("gamma:skim", 'E>0.1', path=path)
    stdV0s.stdKshorts(path=path, fitter='KFit')
    modularAnalysis.cutAndCopyList('K_S0:dstSkim', 'K_S0:merged', 'goodBelleKshort == 1', True, path=path)
    stdV0s.stdLambdas(path=path)
    modularAnalysis.fillParticleList("K+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("gamma:loose", 'theta > 0.296706 and theta < 2.61799 and \
[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or [clusterReg == 3 and E > 0.03]] and \
[abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and [clusterE1E9 > 0.3 or E > 0.1] ', path=path)
    modularAnalysis.reconstructDecay('pi0:loose -> gamma:loose gamma:loose', '0.075 < M < 0.175', 1, True, path=path)
    modularAnalysis.cutAndCopyList('pi0:veryLooseFit', 'pi0:loose', '', True, path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)

    # D0 candidates in four decay channels, with decay-mode IDs counting from 1.
    d0_mass_window = '1.7 < M < 2.1'
    d0_channels = ['K-:dstSkim pi+:dstSkim',
                   'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
                   'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
                   'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for channel_id, channel in enumerate(d0_channels, start=1):
        modularAnalysis.reconstructDecay(f'D0:ch{channel_id} -> {channel}', d0_mass_window, dmID=channel_id, path=path)

    # D*+ candidates on top of each D0 channel, merged into a single list.
    dst_cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    dst_lists = []
    for channel_id in range(1, len(d0_channels) + 1):
        modularAnalysis.reconstructDecay(f'D*+:ch{channel_id} -> D0:ch{channel_id} pi+:all', dst_cut, dmID=channel_id, path=path)
        dst_lists.append(f'D*+:ch{channel_id}')
    modularAnalysis.copyLists(outputListName='D*+:d0pi', inputListNames=dst_lists, path=path)

    # Tracks clearly displaced from the interaction point.
    modularAnalysis.fillParticleList("pi+:offip", '[abs(d0) > 1 and abs(z0) > 2] and [nSVDHits >=3 or nCDCHits >= 20]', path=path)

    path.add_module("SoftwareTrigger", baseIdentifier="skim",
                    preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)
166 
167 
def add_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculate a filter decision and, if possible,
    also do the HLT filtering. This is only possible for beam runs (at the moment).

    Up to now, we add the full reconstruction, but this will change in the future.

    Please note that this function adds the HLT decision, but does not branch
    according to it.

    :param path: the path to which the reconstruction and trigger modules are added
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic)
    :param components: detector components to use (validated with check_components)
    :param kwargs: forwarded to the reconstruction setup functions
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # Full reconstruction, bounded by the occupancy cuts: events exceeding
        # the hit limits are aborted via hlt_event_abort (metadata kept only).
        reconstruction.add_reconstruction(
            path,
            skipGeometryAdding=True,
            pruneTracks=False,
            add_trigger_calculation=False,
            components=components,
            nCDCHitsMax=constants.DOOM_NCDCHITSMAX,
            nSVDShaperDigitsMax=constants.DOOM_NSVDSHAPERDIGITSMAX,
            event_abort=hlt_event_abort,
            **kwargs)

        # Calculate the filter cuts (with debug output stored for every event).
        add_filter_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    elif run_type == constants.RunTypes.cosmic:
        reconstruction.add_cosmics_reconstruction(path, skipGeometryAdding=True, pruneTracks=False,
                                                  components=components, **kwargs)
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
200 
201 
def add_filter_module(path):
    """
    Add and return a skim module, which has a return value dependent
    on the final HLT decision.
    """
    skim_module = path.add_module("TriggerSkim", triggerLines=["software_trigger_cut&all&total_result"])
    return skim_module
208 
209 
def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken
    and only on the accepted events.
    Up to now, this only includes the skim part, but this will
    change in the future.
    """
    check_components(components)

    if run_type == constants.RunTypes.cosmic:
        # Nothing to calculate after the filter for cosmic runs.
        pass
    elif run_type == constants.RunTypes.beam:
        add_skim_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
226 
227 
def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing: set the given error
    flag on the event and prune everything from the data store except the
    metadata, then continue with the main path.
    """
    discard_path = basf2.Path()
    discard_path.add_module("EventErrorFlag", errorFlag=error_flag)
    add_store_only_metadata_path(discard_path)
    module.if_value(condition, discard_path, basf2.AfterConditionPath.CONTINUE)
modularAnalysis.reconstructDecay
def reconstructDecay(decayString, cut, dmID=0, writeOut=False, path=None, candidate_limit=None, ignoreIfTooManyCandidates=True, chargeConjugation=True, allowChargeViolation=False)
Definition: modularAnalysis.py:1022
stdV0s.stdLambdas
def stdLambdas(prioritiseV0=True, fitter='TreeFit', path=None)
Definition: stdV0s.py:69
reconstruction.add_cosmics_reconstruction
def add_cosmics_reconstruction(path, components=None, pruneTracks=True, skipGeometryAdding=False, eventTimingExtraction=True, addClusterExpertModules=True, merge_tracks=True, top_in_counter=False, data_taking_period='early_phase3', use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=False)
Definition: reconstruction.py:167
reconstruction.add_reconstruction
def add_reconstruction(path, components=None, pruneTracks=True, add_trigger_calculation=True, skipGeometryAdding=False, trackFitHypotheses=None, addClusterExpertModules=True, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=None, nCDCHitsMax=6000, nSVDShaperDigitsMax=70000, event_abort=default_event_abort, use_random_numbers_for_hlt_prescale=True)
Definition: reconstruction.py:41
modularAnalysis.cutAndCopyList
def cutAndCopyList(outputListName, inputListName, cut, writeOut=False, path=None)
Definition: modularAnalysis.py:566
vertex.kFit
def kFit(list_name, conf_level, fit_type='vertex', constraint='', daughtersUpdate=False, decay_string='', smearing=0, path=None)
Definition: vertex.py:95
modularAnalysis.fillParticleList
def fillParticleList(decayString, cut, writeOut=False, path=None, enforceFitHypothesis=False)
Definition: modularAnalysis.py:754
stdV0s.stdKshorts
def stdKshorts(prioritiseV0=True, fitter='TreeFit', path=None)
Definition: stdV0s.py:10
modularAnalysis.copyLists
def copyLists(outputListName, inputListNames, writeOut=False, path=None)
Definition: modularAnalysis.py:472