# Belle II Software release-06-02-00
# path_utils.py
8 import basf2
9 import ROOT
10 from softwaretrigger import constants
11 import modularAnalysis
12 import stdV0s
13 import vertex
14 from geometry import check_components
15 import reconstruction
16 from softwaretrigger.reconstruction_utils import bToCharmHLTSkim
17 
18 
def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add the DQM modules matching the given run type and DQM environment to the path.

    :param path: the path the DQM modules are appended to
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic); anything else is fatal
    :param dqm_environment: environment string forwarded to the daqdqm adders
    :param components: detector components forwarded to the daqdqm adders
    :param dqm_mode: DQM mode name; ``DelayDQM`` is only added for "dont_care"/"all_events"
    :param create_hlt_unit_histograms: forwarded to the daqdqm adders
    """
    # Local imports, as the dqm package is not checked out by default
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    # Dispatch on the run type; B2FATAL aborts processing for unknown types.
    dqm_adders = {
        constants.RunTypes.beam: add_collision_dqm,
        constants.RunTypes.cosmic: add_cosmic_dqm,
    }
    adder = dqm_adders.get(run_type)
    if adder is None:
        basf2.B2FATAL(f"Run type {run_type} not supported.")
    adder(path, components=components, dqm_environment=dqm_environment,
          dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)

    if dqm_mode in ["dont_care", "all_events"]:
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')
39 
40 
def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add all the DQM modules for HLT to the path.

    :param path: the path the DQM modules are appended to
    :param run_type: one of ``constants.RunTypes``, forwarded to ``add_online_dqm``
    :param components: detector components, forwarded to ``add_online_dqm``
    :param dqm_mode: a ``constants.DQMModes`` member; its ``name`` is used
    :param create_hlt_unit_histograms: forwarded to ``add_online_dqm``
    """
    mode_name = dqm_mode.name
    add_online_dqm(path,
                   run_type=run_type,
                   dqm_environment=constants.Location.hlt.name,
                   components=components,
                   dqm_mode=mode_name,
                   create_hlt_unit_histograms=create_hlt_unit_histograms)
    # Account the DQM processing time in its own statistics block, per mode.
    summary = path.add_module('StatisticsSummary')
    summary.set_name('Sum_HLT_DQM_' + mode_name)
53 
54 
def add_expressreco_dqm(path, run_type, components, dqm_mode=constants.DQMModes.dont_care.name):
    """
    Add all the DQM modules for ExpressReco to the path.

    :param path: the path the DQM modules are appended to
    :param run_type: one of ``constants.RunTypes``, forwarded to ``add_online_dqm``
    :param components: detector components, forwarded to ``add_online_dqm``
    :param dqm_mode: DQM mode name, defaults to "dont_care"
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.expressreco.name,
        components=components,
        dqm_mode=dqm_mode)
61 
62 
def add_geometry_if_not_present(path):
    """
    Add the Gearbox and Geometry modules to the path unless they are already present.

    :param path: the path to check and extend
    """
    # Geometry is taken from the database; Gearbox has no extra parameters.
    for module_name, module_kwargs in (('Gearbox', {}), ('Geometry', {'useDB': True})):
        if module_name not in path:
            path.add_module(module_name, **module_kwargs)
72 
73 
def add_store_only_metadata_path(path):
    """
    Prune the data store down to the bare minimum: event meta data plus the
    software trigger results (``constants.ALWAYS_SAVE_OBJECTS``).

    After this path was processed, the data store content can no longer be used
    for reconstruction (it is more or less empty); it can only be written out
    to a (S)ROOT file.

    :param path: the path to append the pruning module to
    """
    pruner = path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS)
    pruner.set_name("KeepMetaData")
83 
84 
def add_store_only_rawdata_path(path, additonal_store_arrays_to_keep=None):
    """
    Prune the data store down to the raw detector objects plus the always-needed
    entries (event meta data, software trigger results).

    After this path was processed, the data store content can no longer be used
    for reconstruction (it is more or less empty); it can only be written out
    to a (S)ROOT file.

    :param path: the path to append the pruning module to
    :param additonal_store_arrays_to_keep: optional extra data store entries to keep
        (parameter name keeps its historical spelling for backwards compatibility)
    """
    keep_entries = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if additonal_store_arrays_to_keep:
        keep_entries = keep_entries + additonal_store_arrays_to_keep

    pruner = path.add_module("PruneDataStore", matchEntries=keep_entries)
    pruner.set_name("KeepRawData")
100 
101 
def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Add the SoftwareTrigger for the filter cuts to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using randomly generated numbers,
        otherwise are applied using an internal counter.
    :return: the software trigger module
    """
    trigger_parameters = dict(
        baseIdentifier="filter",
        preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
        useRandomNumbersForPreScale=use_random_numbers_for_prescale,
    )
    filter_module = path.add_module("SoftwareTrigger", **trigger_parameters)

    # Account the filter calculation time in its own statistics block.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    return filter_module
126 
127 
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Add the SoftwareTrigger for the skimming (after the filtering) to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    """
    # ECL cluster and track particle lists
    modularAnalysis.fillParticleList("pi+:skim", 'pt>0.2 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:hadb", 'p>0.1 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:tau", 'abs(d0) < 2 and abs(z0) < 8', path=path)
    modularAnalysis.fillParticleList("gamma:skim", 'E>0.1', loadPhotonBeamBackgroundMVA=False, path=path)
    # V0 lists: K_S0 via KFit, keeping only candidates passing the goodBelleKshort selection
    stdV0s.stdKshorts(path=path, fitter='KFit')
    modularAnalysis.cutAndCopyList('K_S0:dstSkim', 'K_S0:merged', 'goodBelleKshort == 1', True, path=path)
    stdV0s.stdLambdas(path=path)
    # Kaon and pion lists used in the D0 reconstruction below
    modularAnalysis.fillParticleList("K+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    # Loose photons: acceptance window in theta, cluster-region-dependent energy
    # thresholds, plus timing and E1/E9 cuts that are bypassed for E > 0.1
    modularAnalysis.fillParticleList("gamma:loose", 'theta > 0.296706 and theta < 2.61799 and \
[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or [clusterReg == 3 and E > 0.03]] and \
[abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and [clusterE1E9 > 0.3 or E > 0.1]',
                                    loadPhotonBeamBackgroundMVA=False, path=path)
    # pi0 -> gamma gamma in a mass window, then a mass-constrained kinematic fit
    modularAnalysis.reconstructDecay('pi0:loose -> gamma:loose gamma:loose', '0.075 < M < 0.175', 1, True, path=path)
    modularAnalysis.cutAndCopyList('pi0:veryLooseFit', 'pi0:loose', '', True, path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)
    # D0 reconstruction in four decay channels within a wide mass window
    D0_Cut = '1.7 < M < 2.1'
    D0_Ch = ['K-:dstSkim pi+:dstSkim',
             'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
             'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
             'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for chID, channel in enumerate(D0_Ch):
        chID += 1  # decay mode IDs are counted from 1
        modularAnalysis.reconstructDecay('D0:ch' + str(chID) + ' -> ' + str(channel), D0_Cut, dmID=chID, path=path)

    # D*+ -> D0 pi+ with a CMS momentum cut and a cut on the D*-D0 mass difference
    Dst_Cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    Dst_List = []

    for chID, channel in enumerate(D0_Ch):
        chID += 1  # same channel numbering as for the D0 lists above
        modularAnalysis.reconstructDecay('D*+:ch' + str(chID) + ' -> D0:ch' + str(chID) + ' pi+:all', Dst_Cut, dmID=chID, path=path)
        Dst_List.append('D*+:ch' + str(chID))
    modularAnalysis.copyLists(outputListName='D*+:d0pi', inputListNames=Dst_List, path=path)
    # Tracks displaced from the interaction point with a minimum number of SVD/CDC hits
    modularAnalysis.fillParticleList("pi+:offip", '[abs(z0) > 10] and [nSVDHits >=3 or nCDCHits >= 20]', path=path)
    # Particle lists for the B -> charm HLT skims
    bToCharmHLTSkim(path)

    path.add_module("SoftwareTrigger", baseIdentifier="skim",
                    preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)

    # Statistics Summary
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
182 
183 
def add_pre_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculate a filter decision and if possible,
    also do the HLT filtering. This is only possible for beam runs (in the moment).

    Please note that this function adds the HLT decision, but does not branch
    according to it.

    :param path: the path to append the reconstruction modules to
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic); anything else is fatal
    :param components: detector components to use, validated with ``check_components``
    :param kwargs: forwarded to the underlying reconstruction function
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # For beam runs the pre-filter reconstruction is added with the
        # HLT-specific event abort path for too-expensive events.
        reconstruction.add_prefilter_reconstruction(
            path,
            skipGeometryAdding=True,
            pruneTracks=False,
            components=components,
            event_abort=hlt_event_abort,
            **kwargs)

    elif run_type == constants.RunTypes.cosmic:
        reconstruction.add_cosmics_reconstruction(path, skipGeometryAdding=True, pruneTracks=False,
                                                  components=components, **kwargs)

    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
209 
210 
def add_filter_module(path):
    """
    Add and return a skim module, which has a return value dependent
    on the final HLT decision.

    :param path: the path to append the TriggerSkim module to
    :return: the added TriggerSkim module
    """
    final_decision_lines = ["software_trigger_cut&all&total_result"]
    return path.add_module("TriggerSkim", triggerLines=final_decision_lines)
217 
218 
def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken
    and only on the accepted events: the reconstruction modules not essential
    for the filter decision, followed by the skim calculation.

    :param path: the path to append the modules to
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic); anything else is fatal
    :param components: detector components to use, validated with ``check_components``
    """
    check_components(components)

    if run_type not in (constants.RunTypes.beam, constants.RunTypes.cosmic):
        basf2.B2FATAL(f"Run Type {run_type} not supported.")

    # Cosmic runs skip the post-filter reconstruction and only run the skim.
    if run_type == constants.RunTypes.beam:
        reconstruction.add_postfilter_reconstruction(path, components=components)

    add_skim_software_trigger(path, store_array_debug_prescale=1)
236 
237 
def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing, i.e. set an error flag and
    keep only the metadata.

    :param module: the module whose return value triggers the discard path
    :param condition: condition string passed to ``if_value``
    :param error_flag: error flag set via the EventErrorFlag module
    """
    discard_path = basf2.Path()
    discard_path.add_module("EventErrorFlag", errorFlag=error_flag)
    add_store_only_metadata_path(discard_path)
    module.if_value(condition, discard_path, basf2.AfterConditionPath.CONTINUE)
    # Account discarded HLT events in their own statistics block.
    if error_flag == ROOT.Belle2.EventMetaData.c_HLTDiscard:
        discard_path.add_module('StatisticsSummary').set_name('Sum_HLT_Discard')
# Cross-references (signatures of functions used above):
#   modularAnalysis.fillParticleList(decayString, cut, writeOut=False, path=None, enforceFitHypothesis=False,
#       loadPhotonsFromKLM=False, loadPhotonBeamBackgroundMVA=False)
#   modularAnalysis.cutAndCopyList(outputListName, inputListName, cut, writeOut=False, path=None)
#   modularAnalysis.reconstructDecay(decayString, cut, dmID=0, writeOut=False, path=None, candidate_limit=None,
#       ignoreIfTooManyCandidates=True, chargeConjugation=True, allowChargeViolation=False)
#   modularAnalysis.copyLists(outputListName, inputListNames, writeOut=False, path=None)
#   reconstruction.add_postfilter_reconstruction(path, components=None, pruneTracks=False)
#   reconstruction.add_cosmics_reconstruction(path, components=None, pruneTracks=True, skipGeometryAdding=False,
#       eventTimingExtraction=True, addClusterExpertModules=True, merge_tracks=True, use_second_cdc_hits=False,
#       add_muid_hits=False, reconstruct_cdst=False, posttracking=True)
#   reconstruction.add_prefilter_reconstruction(path, components=None, add_modules_for_trigger_calculation=True,
#       skipGeometryAdding=False, trackFitHypotheses=None, use_second_cdc_hits=False, add_muid_hits=False,
#       reconstruct_cdst=None, addClusterExpertModules=True, pruneTracks=True, event_abort=default_event_abort,
#       use_random_numbers_for_hlt_prescale=True)
#   stdV0s.stdKshorts(prioritiseV0=True, fitter='TreeFit', path=None)        (stdV0s.py:17)
#   stdV0s.stdLambdas(prioritiseV0=True, fitter='TreeFit', path=None)        (stdV0s.py:77)
#   vertex.kFit(list_name, conf_level, fit_type='vertex', constraint='', daughtersUpdate=False,
#       decay_string='', massConstraint=[], smearing=0, path=None)           (vertex.py:121)