Belle II Software  release-08-01-10
path_utils.py
1 
8 
9 import basf2
10 from softwaretrigger import constants
11 import modularAnalysis
12 import stdV0s
13 import vertex
14 from geometry import check_components
15 from softwaretrigger.reconstruction_utils import bToCharmHLTSkim
16 
17 
def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Append the DQM modules appropriate for the given run type and DQM environment.

    :param path: basf2 path the DQM modules are appended to.
    :param run_type: one of ``constants.RunTypes`` (``beam`` or ``cosmic``).
    :param dqm_environment: name of the location the DQM runs in (e.g. "hlt", "expressreco").
    :param components: detector components to create DQM plots for.
    :param dqm_mode: which subset of events the DQM should look at.
    :param create_hlt_unit_histograms: forwarded to the daqdqm helper functions.
    """
    # Local imports: the dqm package is not checked out by default.
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    if run_type == constants.RunTypes.beam:
        add_collision_dqm(path, components=components, dqm_environment=dqm_environment,
                          dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    elif run_type == constants.RunTypes.cosmic:
        add_cosmic_dqm(path, components=components, dqm_environment=dqm_environment,
                       dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    else:
        basf2.B2FATAL(f"Run type {run_type} not supported.")

    # Only the modes that see every event also monitor the data-flow delay.
    if dqm_mode in ["dont_care", "all_events"]:
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')
38 
39 
def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Attach the full set of HLT DQM modules to the path, followed by a
    statistics summary so the DQM processing time is accounted separately.
    """
    mode_name = dqm_mode.name
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.hlt.name,
        components=components,
        dqm_mode=mode_name,
        create_hlt_unit_histograms=create_hlt_unit_histograms)
    path.add_module('StatisticsSummary').set_name('Sum_HLT_DQM_' + mode_name)
52 
53 
def add_expressreco_dqm(path, run_type, components, dqm_mode=constants.DQMModes.dont_care.name):
    """
    Attach the full set of ExpressReco DQM modules to the path.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.expressreco.name,
        components=components,
        dqm_mode=dqm_mode)
60 
61 
def add_geometry_if_not_present(path):
    """
    Ensure the Gearbox and Geometry modules are present on the path.

    Modules that are already on the path are left untouched; missing ones are
    appended (Geometry is configured to be loaded from the database).
    """
    for module_name, parameters in (('Gearbox', {}), ('Geometry', {'useDB': True})):
        if module_name not in path:
            path.add_module(module_name, **parameters)
71 
72 
def add_store_only_metadata_path(path):
    """
    Prune the data store down to the objects that are always kept, e.g. the
    event meta data and the results of the software trigger module.

    After this path was processed, the data store content can no longer be
    used for reconstruction (it is more or less empty); it can only be written
    out to a (S)ROOT file.
    """
    prune_module = path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS)
    prune_module.set_name("KeepMetaData")
82 
83 
def add_store_only_rawdata_path(path, additonal_store_arrays_to_keep=None):
    """
    Prune the data store down to the raw detector objects plus the objects
    that are always kept, e.g. the event meta data and the results of the
    software trigger module.

    After this path was processed, the data store content can no longer be
    used for reconstruction (it is more or less empty); it can only be written
    out to a (S)ROOT file.

    :param path: the path to append the prune module to.
    :param additonal_store_arrays_to_keep: optional extra store entries to keep
        (parameter name kept as-is for backwards compatibility with callers).
    """
    keep_entries = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if additonal_store_arrays_to_keep:
        keep_entries = keep_entries + additonal_store_arrays_to_keep

    path.add_module("PruneDataStore", matchEntries=keep_entries).set_name("KeepRawData")
99 
100 
def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Append the SoftwareTrigger module that evaluates the filter cuts.

    Only the calculation of the cuts is implemented here - the cut logic has
    to be done using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of
        the variables needed for the cut calculations in the data store.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using
        randomly generated numbers, otherwise are applied using an internal counter.
    :return: the software trigger module
    """
    trigger_module = path.add_module(
        "SoftwareTrigger",
        baseIdentifier="filter",
        preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
        useRandomNumbersForPreScale=use_random_numbers_for_prescale)

    # Account the processing time of the filter calculation in its own bucket.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    return trigger_module
125 
126 
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Append the SoftwareTrigger module for the skimming (after the filtering).

    The particle lists needed by the skim cut calculations are built first.
    Only the calculation of the cuts is implemented here - the cut logic has
    to be done elsewhere.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of
        the variables needed for the cut calculations in the data store.
    :return: the software trigger module
    """
    # Track and ECL-cluster based particle lists used by the skim lines.
    modularAnalysis.fillParticleList("pi+:skim", 'pt>0.2 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:hadb", 'p>0.1 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:tau", 'abs(d0) < 2 and abs(z0) < 8', path=path)
    modularAnalysis.fillParticleList("gamma:skim", 'E>0.1', path=path)
    stdV0s.stdKshorts(path=path, fitter='KFit')
    modularAnalysis.cutAndCopyList('K_S0:dstSkim', 'K_S0:merged', 'goodBelleKshort == 1', path=path)
    stdV0s.stdLambdas(path=path)
    modularAnalysis.fillParticleList("K+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    gamma_loose_cut = ('theta > 0.296706 and theta < 2.61799 and '
                       '[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or '
                       '[clusterReg == 3 and E > 0.03]] and '
                       '[abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and '
                       '[clusterE1E9 > 0.3 or E > 0.1]')
    modularAnalysis.fillParticleList("gamma:loose", gamma_loose_cut, path=path)
    modularAnalysis.reconstructDecay('pi0:loose -> gamma:loose gamma:loose', '0.075 < M < 0.175', 1, path=path)
    modularAnalysis.cutAndCopyList('pi0:veryLooseFit', 'pi0:loose', '', path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)

    # D0 reconstruction in four channels, numbered from 1.
    d0_cut = '1.7 < M < 2.1'
    d0_channels = ['K-:dstSkim pi+:dstSkim',
                   'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
                   'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
                   'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for channel_id, channel in enumerate(d0_channels, start=1):
        modularAnalysis.reconstructDecay(f'D0:ch{channel_id} -> {channel}', d0_cut, dmID=channel_id, path=path)

    # D*+ from each D0 channel, merged into one combined list afterwards.
    dst_cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    dst_lists = []

    for channel_id in range(1, len(d0_channels) + 1):
        modularAnalysis.reconstructDecay(f'D*+:ch{channel_id} -> D0:ch{channel_id} pi+:all',
                                         dst_cut, dmID=channel_id, path=path)
        dst_lists.append(f'D*+:ch{channel_id}')
    modularAnalysis.copyLists(outputListName='D*+:d0pi', inputListNames=dst_lists, path=path)
    modularAnalysis.fillParticleList("pi+:offip", '[abs(z0) > 10] and [nSVDHits >=3 or nCDCHits >= 20]', path=path)
    bToCharmHLTSkim(path)

    path.add_module("SoftwareTrigger", baseIdentifier="skim",
                    preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)

    # Account the processing time of the skim calculation in its own bucket.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
180 
181 
def add_pre_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculate a filter decision and if possible,
    also do the HLT filtering. This is only possible for beam runs (in the moment).

    Please note that this function adds the HLT decision, but does not branch
    according to it.

    :param path: the path to append the reconstruction modules to.
    :param run_type: one of ``constants.RunTypes`` (``beam`` or ``cosmic``).
    :param components: detector components to reconstruct; validated first.
    :param kwargs: forwarded to the reconstruction helper functions.
    """
    import reconstruction  # noqa

    check_components(components)

    if run_type == constants.RunTypes.beam:
        # Fix: the call to add_prefilter_reconstruction was truncated in this
        # file, leaving an orphaned argument list. Restored from the
        # reconstruction.add_prefilter_reconstruction signature.
        reconstruction.add_prefilter_reconstruction(
            path,
            skipGeometryAdding=True,
            components=components,
            event_abort=hlt_event_abort,
            **kwargs)

    elif run_type == constants.RunTypes.cosmic:
        reconstruction.add_cosmics_reconstruction(path, skipGeometryAdding=True, pruneTracks=False,
                                                  components=components, **kwargs)

    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
208 
209 
def add_filter_module(path):
    """
    Append a TriggerSkim module whose return value reflects the final HLT
    decision, and return that module.

    :param path: the path to append the module to.
    :return: the added TriggerSkim module.
    """
    final_decision_lines = ["software_trigger_cut&all&total_result"]
    return path.add_module("TriggerSkim", triggerLines=final_decision_lines)
216 
217 
def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken and only
    on the accepted events: the reconstruction modules not essential for the
    filter decision, followed by the skim calculation.
    """
    import reconstruction  # noqa

    check_components(components)

    # B2FATAL aborts processing, so this acts as a guard clause.
    if run_type not in (constants.RunTypes.beam, constants.RunTypes.cosmic):
        basf2.B2FATAL(f"Run Type {run_type} not supported.")

    # The post-filter reconstruction is only available for beam runs.
    if run_type == constants.RunTypes.beam:
        reconstruction.add_postfilter_reconstruction(path, components=components, pruneTracks=False)

    add_skim_software_trigger(path, store_array_debug_prescale=1)
237 
238 
def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing: set the given error
    flag, keep only the metadata, and continue processing afterwards.

    :param module: the module whose return value triggers the discard path.
    :param condition: condition on the module return value (passed to ``if_value``).
    :param error_flag: error flag set on the discarded events.
    """
    # Always avoid the top-level 'import ROOT'.
    import ROOT  # noqa

    discard_path = basf2.Path()
    discard_path.add_module("EventErrorFlag", errorFlag=error_flag)
    add_store_only_metadata_path(discard_path)
    module.if_value(condition, discard_path, basf2.AfterConditionPath.CONTINUE)
    if error_flag == ROOT.Belle2.EventMetaData.c_HLTDiscard:
        # Account the HLT-discarded events in their own statistics bucket.
        discard_path.add_module('StatisticsSummary').set_name('Sum_HLT_Discard')
def cutAndCopyList(outputListName, inputListName, cut, writeOut=False, path=None)
def reconstructDecay(decayString, cut, dmID=0, writeOut=False, path=None, candidate_limit=None, ignoreIfTooManyCandidates=True, chargeConjugation=True, allowChargeViolation=False)
def copyLists(outputListName, inputListNames, writeOut=False, path=None)
def fillParticleList(decayString, cut, writeOut=False, path=None, enforceFitHypothesis=False, loadPhotonsFromKLM=False)
def add_cosmics_reconstruction(path, components=None, pruneTracks=True, skipGeometryAdding=False, eventTimingExtraction=True, addClusterExpertModules=True, merge_tracks=True, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=False, posttracking=True, eventt0_combiner_mode="prefer_cdc", legacy_ecl_charged_pid=False)
def add_prefilter_reconstruction(path, components=None, add_modules_for_trigger_calculation=True, skipGeometryAdding=False, trackFitHypotheses=None, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=None, event_abort=default_event_abort, pxd_filtering_offline=False, append_full_grid_cdc_eventt0=False)
def add_postfilter_reconstruction(path, components=None, pruneTracks=False, addClusterExpertModules=True, reconstruct_cdst=None, legacy_ecl_charged_pid=False)
def stdKshorts(prioritiseV0=True, fitter='TreeFit', path=None, updateAllDaughters=False, writeOut=False)
Definition: stdV0s.py:17
def stdLambdas(prioritiseV0=True, fitter='TreeFit', path=None, updateAllDaughters=False, writeOut=False)
Definition: stdV0s.py:182
def kFit(list_name, conf_level, fit_type='vertex', constraint='', daughtersUpdate=False, decay_string='', massConstraint=[], recoilMass=0, smearing=0, path=None)
Definition: vertex.py:129