import basf2
import ROOT

import modularAnalysis
import reconstruction
import vertex

from softwaretrigger import constants
from geometry import check_components


def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add DQM plots for a specific run type and dqm environment.
    """
    # Imported here to keep the dependency on the daqdqm scripts local to this function.
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    if run_type == constants.RunTypes.beam:
        add_collision_dqm(path, components=components, dqm_environment=dqm_environment,
                          dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    elif run_type == constants.RunTypes.cosmic:
        add_cosmic_dqm(path, components=components, dqm_environment=dqm_environment,
                       dqm_mode=dqm_mode)
    else:
        basf2.B2FATAL("Run type {} not supported.".format(run_type))

    if dqm_mode in ["dont_care", "all_events"]:
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')


def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add all the DQM modules for HLT to the path.
    """
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.hlt.name,
        components=components,
        dqm_mode=dqm_mode.name,
        create_hlt_unit_histograms=create_hlt_unit_histograms)
    path.add_module('StatisticsSummary').set_name('Sum_HLT_DQM_' + dqm_mode.name)


def add_expressreco_dqm(path, run_type, components, dqm_mode=constants.DQMModes.dont_care.name):
    """
    Add all the DQM modules for ExpressReco to the path.
    """
    add_online_dqm(path, run_type=run_type, dqm_environment=constants.Location.expressreco.name,
                   components=components, dqm_mode=dqm_mode)


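# Illustrative sketch (not part of the online configuration): build a DQM path for a beam
# run with the helpers above. The input module, histogram file name and component list are
# assumptions of this example; production setups take them from the run configuration
# (e.g. constants.DEFAULT_HLT_COMPONENTS).
def _example_hlt_dqm_path():
    path = basf2.Path()
    path.add_module('RootInput')  # assumed input source for this sketch
    path.add_module('HistoManager', histoFileName='dqm_histograms.root')  # assumed histogram sink
    add_hlt_dqm(path,
                run_type=constants.RunTypes.beam,
                components=["CDC", "ECL", "KLM"],  # assumed component subset
                dqm_mode=constants.DQMModes.dont_care)
    return path

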
def add_geometry_if_not_present(path):
    """
    Add the geometry and gearbox module if it was not already added to the path.
    """
    if 'Gearbox' not in path:
        path.add_module('Gearbox')

    if 'Geometry' not in path:
        path.add_module('Geometry', useDB=True)


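# Minimal sketch: add_geometry_if_not_present can be called unconditionally, since it only
# adds Gearbox/Geometry when they are not yet on the path. The downstream unpacker module
# is an assumed example of something that needs the geometry.
def _example_geometry_setup(path):
    add_geometry_if_not_present(path)
    add_geometry_if_not_present(path)  # second call is a no-op, nothing gets duplicated
    path.add_module('CDCUnpacker')  # assumed geometry-dependent module

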
def add_store_only_metadata_path(path):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    things that are really needed, e.g. the event meta data and the results of the software trigger module.

    After this path has been processed, the data store content can no longer be used for reconstruction
    (because it is more or less empty); it can only be written out to a (S)ROOT file.
    """
    path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS).set_name("KeepMetaData")


def add_store_only_rawdata_path(path, additional_store_arrays_to_keep=None):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    raw objects from the detector and things that are really needed, e.g. the event meta data and the
    results of the software trigger module.

    After this path has been processed, the data store content can no longer be used for reconstruction
    (because it is more or less empty); it can only be written out to a (S)ROOT file.
    """
    entries_to_keep = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS

    if additional_store_arrays_to_keep:
        entries_to_keep += additional_store_arrays_to_keep

    path.add_module("PruneDataStore", matchEntries=entries_to_keep).set_name("KeepRawData")


def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Add the SoftwareTrigger for the filter cuts to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store the content of the variables needed for the
        cut calculations in the data store every N events.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using randomly generated
        numbers, otherwise they are applied using an internal counter.
    :return: the software trigger module
    """
    hlt_cut_module = path.add_module("SoftwareTrigger",
                                     baseIdentifier="filter",
                                     preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
                                     useRandomNumbersForPreScale=use_random_numbers_for_prescale)

    return hlt_cut_module


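# Illustrative sketch of the "cut logic via the module return value" mentioned in the
# docstring above: send rejected events onto a discard path that only keeps metadata.
# The condition string and the meaning of the return value are assumptions here and
# depend on the actual trigger configuration.
def _example_filter_branch(path):
    trigger_module = add_filter_software_trigger(path, store_array_debug_prescale=1)
    discard_path = basf2.Path()
    add_store_only_metadata_path(discard_path)
    # Continue on the pruned path for events that did not pass (condition string assumed).
    trigger_module.if_value("==0", discard_path, basf2.AfterConditionPath.CONTINUE)

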
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Add the SoftwareTrigger for the skimming (after the filtering) to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store the content of the variables needed for the
        cut calculations in the data store every N events.
    :return: the software trigger module
    """
    # NOTE: the K-:dstSkim, pi+:dstSkim and K_S0:dstSkim input lists are assumed to be
    # filled by earlier skim particle-list setup.

    # Loose photon selection used to build pi0 candidates for the D* skim channels
    # (the list name and the fillParticleList call are assumptions; only the cut is preserved).
    modularAnalysis.fillParticleList(
        'gamma:loose',
        '[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or [clusterReg == 3 and E > 0.03]] and '
        '[abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and [clusterE1E9 > 0.3 or E > 0.1]',
        path=path)

    # pi0 candidates, mass constrained with KFit
    # (the reconstructDecay call is an assumption; only the kFit line is preserved).
    modularAnalysis.reconstructDecay('pi0:veryLooseFit -> gamma:loose gamma:loose', '', path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)

    # D0 candidates in four decay channels
    D0_Cut = '1.7 < M < 2.1'
    D0_Ch = ['K-:dstSkim pi+:dstSkim',
             'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
             'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
             'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for chID, channel in enumerate(D0_Ch):
        # Loop body assumed: reconstruct one D0 list per channel.
        modularAnalysis.reconstructDecay('D0:ch' + str(chID) + ' -> ' + channel, D0_Cut, dmID=chID, path=path)

    # D*+ candidates built from the D0 lists above
    Dst_Cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    Dst_List = []

    for chID, channel in enumerate(D0_Ch):
        # Loop body assumed: combine each D0 list with a slow pion.
        modularAnalysis.reconstructDecay('D*+:ch' + str(chID) + ' -> D0:ch' + str(chID) + ' pi+:dstSkim',
                                         Dst_Cut, dmID=chID, path=path)
        Dst_List.append('D*+:ch' + str(chID))

    return path.add_module("SoftwareTrigger", baseIdentifier="skim",
                           preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)


def add_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculate a filter decision and, if possible,
    also do the HLT filtering. This is only possible for beam runs (at the moment).

    Up to now, we add the full reconstruction, but this will change in the future.

    Please note that this function adds the HLT decision, but does not branch
    the path based on it.
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # Full reconstruction for the filter decision
        # (the call header is an assumption; only the keyword arguments are preserved).
        reconstruction.add_reconstruction(
            path,
            skipGeometryAdding=True,
            add_trigger_calculation=False,
            components=components,
            nCDCHitsMax=constants.DOOM_NCDCHITSMAX,
            nSVDShaperDigitsMax=constants.DOOM_NSVDSHAPERDIGITSMAX,
            event_abort=hlt_event_abort,
            **kwargs)

        add_filter_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    elif run_type == constants.RunTypes.cosmic:
        # Cosmic reconstruction (the call header is an assumption; only the trailing arguments are preserved).
        reconstruction.add_cosmics_reconstruction(path, components=components, **kwargs)

        add_filter_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")


def add_filter_module(path):
    """
    Add and return a skim module, which has a return value dependent
    on the final HLT decision.
    """
    return path.add_module("TriggerSkim", triggerLines=["software_trigger_cut&all&total_result"])


def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken
    and only on the accepted events.

    Up to now, this only includes the skim part, but this will
    change in the future.
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        add_skim_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
    elif run_type == constants.RunTypes.cosmic:
        add_skim_software_trigger(path, store_array_debug_prescale=1)
        path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")


def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing, i.e. set an error flag and
    keep only the metadata.
    """
    p = basf2.Path()
    p.add_module("EventErrorFlag", errorFlag=error_flag)
    add_store_only_metadata_path(p)
    module.if_value(condition, p, basf2.AfterConditionPath.CONTINUE)
    if error_flag == ROOT.Belle2.EventMetaData.c_HLTDiscard:
        p.add_module('StatisticsSummary').set_name('Sum_HLT_Discard')