# Belle II Software  release-05-02-19
# path_utils.py
1 import basf2
2 import ROOT
3 from softwaretrigger import constants
4 import modularAnalysis
5 import stdV0s
6 import vertex
7 from geometry import check_components
8 import reconstruction
9 
10 
def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Attach the DQM modules matching the given run type and DQM environment to the path.
    """
    # Local imports, as the dqm package is not checked out by default.
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    if run_type == constants.RunTypes.beam:
        add_collision_dqm(
            path,
            components=components,
            dqm_environment=dqm_environment,
            dqm_mode=dqm_mode,
            create_hlt_unit_histograms=create_hlt_unit_histograms)
    elif run_type == constants.RunTypes.cosmic:
        add_cosmic_dqm(
            path,
            components=components,
            dqm_environment=dqm_environment,
            dqm_mode=dqm_mode)
    else:
        basf2.B2FATAL(f"Run type {run_type} not supported.")

    # The delay monitoring is only meaningful when all (or an unfiltered set of) events pass by.
    if dqm_mode in ("dont_care", "all_events"):
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')
31 
32 
def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Attach the full HLT DQM chain to the path, followed by a statistics summary module.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.hlt.name,
        components=components,
        dqm_mode=dqm_mode.name,
        create_hlt_unit_histograms=create_hlt_unit_histograms)
    # Sum up the processing statistics of the DQM chain under a mode-specific label.
    summary = path.add_module('StatisticsSummary')
    summary.set_name('Sum_HLT_DQM_' + dqm_mode.name)
45 
46 
def add_expressreco_dqm(path, run_type, components, dqm_mode=constants.DQMModes.dont_care.name):
    """
    Attach the DQM modules used on ExpressReco to the path.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.expressreco.name,
        components=components,
        dqm_mode=dqm_mode)
53 
54 
def add_geometry_if_not_present(path):
    """
    Ensure the Gearbox and Geometry modules are on the path, adding whichever is missing.
    """
    # Gearbox first, then Geometry (loaded from the database) — order matters for basf2 setup.
    for module_name, module_kwargs in (('Gearbox', {}), ('Geometry', {'useDB': True})):
        if module_name not in path:
            path.add_module(module_name, **module_kwargs)
64 
65 
def add_store_only_metadata_path(path):
    """
    Prune the data store down to the bare essentials (event meta data, software
    trigger results, and the other objects listed in ``constants.ALWAYS_SAVE_OBJECTS``).

    After this path was processed, the data store content can no longer be used for
    reconstruction (it is more or less empty); it can only be written to a (S)ROOT file.
    """
    prune_module = path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS)
    prune_module.set_name("KeepMetaData")
75 
76 
def add_store_only_rawdata_path(path, additonal_store_arrays_to_keep=None):
    """
    Prune the data store down to raw detector objects plus the bare essentials
    (event meta data and software trigger results).

    After this path was processed, the data store content can no longer be used for
    reconstruction (it is more or less empty); it can only be written to a (S)ROOT file.

    :param additonal_store_arrays_to_keep: optional extra data store entries to keep.
        NOTE(review): parameter name spelling ("additonal") kept as-is — it is part of
        the public interface and renaming would break keyword callers.
    """
    keep = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if additonal_store_arrays_to_keep:
        keep = keep + additonal_store_arrays_to_keep

    path.add_module("PruneDataStore", matchEntries=keep).set_name("KeepRawData")
92 
93 
def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Add the SoftwareTrigger for the filter cuts to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using randomly generated numbers,
        otherwise are applied using an internal counter.
    :return: the software trigger module
    """
    return path.add_module(
        "SoftwareTrigger",
        baseIdentifier="filter",
        preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
        useRandomNumbersForPreScale=use_random_numbers_for_prescale)
116 
117 
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Add the SoftwareTrigger for the skimming (after the filtering) to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :return: the software trigger module
    """
    # ECL cluster and track particle lists
    modularAnalysis.fillParticleList("pi+:skim", 'pt>0.2 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:hadb", 'p>0.1 and abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:tau", 'abs(d0) < 2 and abs(z0) < 8', path=path)
    modularAnalysis.fillParticleList("gamma:skim", 'E>0.1', path=path)
    # V0 lists: K_S0 via KFit, then a goodBelleKshort-selected copy for the D* skim.
    stdV0s.stdKshorts(path=path, fitter='KFit')
    modularAnalysis.cutAndCopyList('K_S0:dstSkim', 'K_S0:merged', 'goodBelleKshort == 1', True, path=path)
    stdV0s.stdLambdas(path=path)
    # Kaon/pion lists for the D0 reconstruction channels below.
    modularAnalysis.fillParticleList("K+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList("pi+:dstSkim", 'abs(d0) < 2 and abs(z0) < 4', path=path)
    # Loose photons with region-dependent energy thresholds and timing/shape requirements.
    modularAnalysis.fillParticleList("gamma:loose", 'theta > 0.296706 and theta < 2.61799 and \
[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or [clusterReg == 3 and E > 0.03]] and \
[abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and [clusterE1E9 > 0.3 or E > 0.1] ', path=path)
    # pi0 candidates from loose photons, then a mass-constrained KFit copy.
    modularAnalysis.reconstructDecay('pi0:loose -> gamma:loose gamma:loose', '0.075 < M < 0.175', 1, True, path=path)
    modularAnalysis.cutAndCopyList('pi0:veryLooseFit', 'pi0:loose', '', True, path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)
    # D0 reconstruction in four decay channels, with a wide mass window.
    D0_Cut = '1.7 < M < 2.1'
    D0_Ch = ['K-:dstSkim pi+:dstSkim',
             'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
             'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
             'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for chID, channel in enumerate(D0_Ch):
        chID += 1  # decay-mode IDs are counted from 1
        modularAnalysis.reconstructDecay('D0:ch' + str(chID) + ' -> ' + str(channel), D0_Cut, dmID=chID, path=path)

    # D*+ -> D0 pi+ in the same channels, requiring a hard CMS momentum and a small mass difference.
    Dst_Cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    Dst_List = []

    for chID, channel in enumerate(D0_Ch):
        chID += 1  # decay-mode IDs are counted from 1
        modularAnalysis.reconstructDecay('D*+:ch' + str(chID) + ' -> D0:ch' + str(chID) + ' pi+:all', Dst_Cut, dmID=chID, path=path)
        Dst_List.append('D*+:ch' + str(chID))
    # Merge all per-channel D*+ lists into one combined list.
    modularAnalysis.copyLists(outputListName='D*+:d0pi', inputListNames=Dst_List, path=path)
    # Displaced tracks (off-IP) with minimal hit requirements.
    modularAnalysis.fillParticleList("pi+:offip", '[abs(d0) > 1 and abs(z0) > 2] and [nSVDHits >=3 or nCDCHits >= 20]', path=path)

    path.add_module("SoftwareTrigger", baseIdentifier="skim",
                    preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)
167 
168 
def add_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculate a filter decision and, if possible,
    also do the HLT filtering. This is only possible for beam runs (at the moment).

    Up to now, we add the full reconstruction, but this will change in the future.

    Please note that this function adds the HLT decision, but does not branch
    according to it.

    :param path: the path to add the reconstruction and trigger modules to
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic)
    :param components: detector components to reconstruct; validated below
    :param kwargs: forwarded to the reconstruction setup functions
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # Full reconstruction with HLT-specific limits; events exceeding the
        # occupancy limits are aborted via hlt_event_abort (keeps metadata only).
        # NOTE: the call line was missing in the broken source; restored here.
        reconstruction.add_reconstruction(
            path,
            skipGeometryAdding=True,
            pruneTracks=False,
            add_trigger_calculation=False,
            components=components,
            nCDCHitsMax=constants.DOOM_NCDCHITSMAX,
            nSVDShaperDigitsMax=constants.DOOM_NSVDSHAPERDIGITSMAX,
            event_abort=hlt_event_abort,
            **kwargs)
        add_filter_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    elif run_type == constants.RunTypes.cosmic:
        reconstruction.add_cosmics_reconstruction(path, skipGeometryAdding=True, pruneTracks=False,
                                                  components=components, **kwargs)
        add_filter_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
203 
204 
def add_filter_module(path):
    """
    Append a TriggerSkim module whose return value reflects the final HLT decision.

    :param path: the path to append the module to
    :return: the added TriggerSkim module
    """
    final_decision_line = "software_trigger_cut&all&total_result"
    return path.add_module("TriggerSkim", triggerLines=[final_decision_line])
211 
212 
def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken
    and only on the accepted events.

    Up to now, this only includes the skim part, but this will
    change in the future.

    :param path: the path to add the skim trigger modules to
    :param run_type: one of ``constants.RunTypes`` (beam or cosmic)
    :param components: detector components; validated below
    """
    check_components(components)

    # The post-filter reconstruction is currently identical for physics (beam)
    # and cosmics events, so both branches are merged into one.
    if run_type in (constants.RunTypes.beam, constants.RunTypes.cosmic):
        add_skim_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
231 
232 
def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing, i.e. set an error flag and
    keep only the metadata.

    :param module: the module whose return value triggers the discard path
    :param condition: the condition string passed to ``if_value``
    :param error_flag: the EventErrorFlag value to record on discarded events
    """
    discard_path = basf2.Path()
    # Record why the event was discarded, then strip everything but metadata.
    discard_path.add_module("EventErrorFlag", errorFlag=error_flag)
    add_store_only_metadata_path(discard_path)
    module.if_value(condition, discard_path, basf2.AfterConditionPath.CONTINUE)
    # Only genuine HLT discards are counted in the statistics summary.
    if error_flag == ROOT.Belle2.EventMetaData.c_HLTDiscard:
        discard_path.add_module('StatisticsSummary').set_name('Sum_HLT_Discard')
modularAnalysis.reconstructDecay
def reconstructDecay(decayString, cut, dmID=0, writeOut=False, path=None, candidate_limit=None, ignoreIfTooManyCandidates=True, chargeConjugation=True, allowChargeViolation=False)
Definition: modularAnalysis.py:1023
stdV0s.stdLambdas
def stdLambdas(prioritiseV0=True, fitter='TreeFit', path=None)
Definition: stdV0s.py:69
reconstruction.add_cosmics_reconstruction
def add_cosmics_reconstruction(path, components=None, pruneTracks=True, skipGeometryAdding=False, eventTimingExtraction=True, addClusterExpertModules=True, merge_tracks=True, top_in_counter=False, data_taking_period='early_phase3', use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=False)
Definition: reconstruction.py:256
reconstruction.add_reconstruction
def add_reconstruction(path, components=None, pruneTracks=True, add_trigger_calculation=True, skipGeometryAdding=False, trackFitHypotheses=None, addClusterExpertModules=True, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=None, nCDCHitsMax=6000, nSVDShaperDigitsMax=70000, event_abort=default_event_abort, use_random_numbers_for_hlt_prescale=True)
Definition: reconstruction.py:130
modularAnalysis.cutAndCopyList
def cutAndCopyList(outputListName, inputListName, cut, writeOut=False, path=None)
Definition: modularAnalysis.py:567
vertex.kFit
def kFit(list_name, conf_level, fit_type='vertex', constraint='', daughtersUpdate=False, decay_string='', smearing=0, path=None)
Definition: vertex.py:95
modularAnalysis.fillParticleList
def fillParticleList(decayString, cut, writeOut=False, path=None, enforceFitHypothesis=False)
Definition: modularAnalysis.py:755
stdV0s.stdKshorts
def stdKshorts(prioritiseV0=True, fitter='TreeFit', path=None)
Definition: stdV0s.py:10
modularAnalysis.copyLists
def copyLists(outputListName, inputListNames, writeOut=False, path=None)
Definition: modularAnalysis.py:473