import basf2
import ROOT

import modularAnalysis
import reconstruction
import stdV0s
import vertex

from geometry import check_components
from softwaretrigger import constants
def add_online_dqm(path, run_type, dqm_environment, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add DQM plots for a specific run type and dqm environment.

    :param path: the basf2 path the DQM modules are appended to
    :param run_type: one of ``constants.RunTypes`` (``beam`` or ``cosmic``)
    :param dqm_environment: name of the location the DQM runs in (e.g. HLT or ExpressReco)
    :param components: detector components to create plots for
    :param dqm_mode: string selecting which subset of events is histogrammed
    :param create_hlt_unit_histograms: if True, also create per-HLT-unit histograms
    """
    # Local imports, so a broken DQM script cannot break this module at import time.
    from daqdqm.collisiondqm import add_collision_dqm
    from daqdqm.cosmicdqm import add_cosmic_dqm

    if run_type == constants.RunTypes.beam:
        add_collision_dqm(path, components=components, dqm_environment=dqm_environment,
                          dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    elif run_type == constants.RunTypes.cosmic:
        add_cosmic_dqm(path, components=components, dqm_environment=dqm_environment,
                       dqm_mode=dqm_mode, create_hlt_unit_histograms=create_hlt_unit_histograms)
    else:
        # Unknown run types are a configuration error and must stop processing.
        basf2.B2FATAL(f"Run type {run_type} not supported.")

    # The DelayDQM module monitors processing delay; it is only meaningful when
    # it sees (at least potentially) every event.
    if dqm_mode in ["dont_care", "all_events"]:
        path.add_module('DelayDQM', title=dqm_environment, histogramDirectoryName='DAQ')
def add_hlt_dqm(path, run_type, components, dqm_mode, create_hlt_unit_histograms=False):
    """
    Add all the DQM modules for HLT to the path.

    :param path: the basf2 path to extend
    :param run_type: one of ``constants.RunTypes``
    :param components: detector components to create plots for
    :param dqm_mode: a ``constants.DQMModes`` member (its ``.name`` is forwarded)
    :param create_hlt_unit_histograms: if True, also create per-HLT-unit histograms
    """
    # NOTE(review): the call head was reconstructed — confirm against repository history.
    add_online_dqm(
        path,
        run_type=run_type,
        dqm_environment=constants.Location.hlt.name,
        components=components,
        dqm_mode=dqm_mode.name,
        create_hlt_unit_histograms=create_hlt_unit_histograms)
    # Account the time spent in DQM separately in the module statistics.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_DQM_' + dqm_mode.name)
def add_expressreco_dqm(path, run_type, components, dqm_mode=constants.DQMModes.dont_care.name):
    """
    Add all the DQM modules for ExpressReco to the path.

    :param path: the basf2 path to extend
    :param run_type: one of ``constants.RunTypes``
    :param components: detector components to create plots for
    :param dqm_mode: DQM mode name, by default ``dont_care``
    """
    add_online_dqm(path, run_type=run_type, dqm_environment=constants.Location.expressreco.name, components=components,
                   dqm_mode=dqm_mode)
def add_geometry_if_not_present(path):
    """
    Add the geometry and gearbox module if it was not already added to the path.

    :param path: the basf2 path to check and extend
    """
    # Gearbox gives access to the xml configuration, which the geometry needs.
    if 'Gearbox' not in path:
        path.add_module('Gearbox')

    # Take the geometry from the conditions database (useDB=True).
    if 'Geometry' not in path:
        path.add_module('Geometry', useDB=True)
def add_store_only_metadata_path(path):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    things that are really needed, e.g. the event meta data and the results of the software trigger module.

    After this path was processed, you can not use the data store content any more to do reconstruction (because
    it is more or less empty), but can only output it to a (S)ROOT file.

    :param path: the basf2 path to append the pruning module to
    """
    path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS).set_name("KeepMetaData")
def add_store_only_rawdata_path(path, additonal_store_arrays_to_keep=None):
    """
    Helper function to create a path which deletes (prunes) everything from the data store except
    raw objects from the detector and things that are really needed, e.g. the event meta data and the results of the
    software trigger module.

    After this path was processed, you can not use the data store content any more to do reconstruction (because
    it is more or less empty), but can only output it to a (S)ROOT file.

    :param path: the basf2 path to append the pruning module to
    :param additonal_store_arrays_to_keep: optional list of additional data store names to keep
        (parameter name keeps the historical misspelling for backwards compatibility)
    """
    # Concatenation builds a fresh list, so extending it below cannot mutate the constants.
    entries_to_keep = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS

    if additonal_store_arrays_to_keep:
        entries_to_keep += additonal_store_arrays_to_keep

    path.add_module("PruneDataStore", matchEntries=entries_to_keep).set_name("KeepRawData")
def add_filter_software_trigger(path,
                                store_array_debug_prescale=0,
                                use_random_numbers_for_prescale=True):
    """
    Add the SoftwareTrigger for the filter cuts to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :param use_random_numbers_for_prescale: If True, the prescales are applied using randomly generated numbers,
        otherwise are applied using an internal counter.
    :return: the software trigger module
    """
    hlt_cut_module = path.add_module(
        "SoftwareTrigger",
        baseIdentifier="filter",
        preScaleStoreDebugOutputToDataStore=store_array_debug_prescale,
        useRandomNumbersForPreScale=use_random_numbers_for_prescale)

    # Account the filter-cut calculation separately in the module statistics.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Filter_Calculation')

    return hlt_cut_module
def add_skim_software_trigger(path, store_array_debug_prescale=0):
    """
    Add the SoftwareTrigger for the skimming (after the filtering) to the given path.

    Only the calculation of the cuts is implemented here - the cut logic has to be done
    using the module return value.

    :param path: The path to which the module should be added.
    :param store_array_debug_prescale: When not 0, store each N events the content of the variables needed for the
        cut calculations in the data store.
    :return: the software trigger module
    """
    # NOTE(review): several lines of this function were lost in the source and have been
    # reconstructed — confirm the particle-list names and cut strings against repository history.
    # Charged candidate lists used by the D* skim below.
    modularAnalysis.fillParticleList('pi+:dstSkim', 'abs(d0) < 2 and abs(z0) < 4', path=path)
    modularAnalysis.fillParticleList('K-:dstSkim', 'abs(d0) < 2 and abs(z0) < 4', path=path)
    # K_S0 candidates for the K_S0 D0 channel.
    stdV0s.stdKshorts(path=path)
    modularAnalysis.cutAndCopyList('K_S0:dstSkim', 'K_S0:merged', '', path=path)
    # Loose photons with region-dependent energy thresholds and timing/shape cleanup.
    modularAnalysis.fillParticleList(
        'gamma:loose',
        '[[clusterReg == 1 and E > 0.03] or [clusterReg == 2 and E > 0.02] or [clusterReg == 3 and E > 0.03]] and \
        [abs(clusterTiming) < formula(1.0 * clusterErrorTiming) or E > 0.1] and [clusterE1E9 > 0.3 or E > 0.1]',
        loadPhotonBeamBackgroundMVA=False, path=path)
    # pi0 candidates with a mass-constrained fit (conf_level 0.0 keeps all converged fits).
    modularAnalysis.reconstructDecay('pi0:veryLooseFit -> gamma:loose gamma:loose', '0.075 < M < 0.175', path=path)
    vertex.kFit('pi0:veryLooseFit', 0.0, 'mass', path=path)

    # Reconstruct D0 candidates in four channels within a broad mass window.
    D0_Cut = '1.7 < M < 2.1'
    D0_Ch = ['K-:dstSkim pi+:dstSkim',
             'K-:dstSkim pi+:dstSkim pi0:veryLooseFit',
             'K-:dstSkim pi+:dstSkim pi-:dstSkim pi+:dstSkim',
             'K_S0:dstSkim pi+:dstSkim pi-:dstSkim']

    for chID, channel in enumerate(D0_Ch):
        modularAnalysis.reconstructDecay('D0:ch' + str(chID) + ' -> ' + channel, D0_Cut, chID, path=path)

    # Combine each D0 channel with a slow pion to form D*+ candidates.
    Dst_Cut = 'useCMSFrame(p) > 2.2 and massDifference(0) < 0.16'
    Dst_List = []

    for chID, channel in enumerate(D0_Ch):
        modularAnalysis.reconstructDecay('D*+:ch' + str(chID) + ' -> D0:ch' + str(chID) + ' pi+:dstSkim',
                                         Dst_Cut, chID, path=path)
        Dst_List.append('D*+:ch' + str(chID))

    modularAnalysis.copyLists('D*+:d0pi', Dst_List, path=path)

    # Additional B-to-charm skim lists.
    bToCharmHLTSkim(path)

    path.add_module("SoftwareTrigger", baseIdentifier="skim",
                    preScaleStoreDebugOutputToDataStore=store_array_debug_prescale)

    # Account the skim calculation separately in the module statistics.
    path.add_module('StatisticsSummary').set_name('Sum_HLT_Skim_Calculation')
def add_pre_filter_reconstruction(path, run_type, components, **kwargs):
    """
    Add everything needed to calculation a filter decision and if possible,
    also do the HLT filtering. This is only possible for beam runs (in the moment).

    Please note that this function adds the HLT decision, but does not branch
    according to it.

    :param path: the basf2 path to extend
    :param run_type: one of ``constants.RunTypes``
    :param components: detector components to reconstruct; validated by ``check_components``
    :param kwargs: forwarded to the reconstruction functions
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # NOTE(review): call head reconstructed — geometry is added separately by the caller,
        # hence skipGeometryAdding=True; hlt_event_abort installs the HLT discard path.
        reconstruction.add_prefilter_reconstruction(
            path,
            skipGeometryAdding=True,
            components=components,
            event_abort=hlt_event_abort,
            **kwargs)
    elif run_type == constants.RunTypes.cosmic:
        # NOTE(review): call head reconstructed from the surviving keyword arguments.
        reconstruction.add_cosmics_reconstruction(path, skipGeometryAdding=True,
                                                  components=components, **kwargs)
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
def add_filter_module(path):
    """
    Add and return a skim module, which has a return value dependent
    on the final HLT decision.

    :param path: the basf2 path to append the module to
    :return: the TriggerSkim module; its return value reflects the total HLT result
    """
    return path.add_module("TriggerSkim", triggerLines=["software_trigger_cut&all&total_result"])
def add_post_filter_reconstruction(path, run_type, components):
    """
    Add all modules which should run after the HLT decision is taken
    and only on the accepted events.
    This includes reconstruction modules not essential
    to calculate filter decision and then the skim calculation.

    :param path: the basf2 path to extend
    :param run_type: one of ``constants.RunTypes``
    :param components: detector components to reconstruct; validated by ``check_components``
    """
    check_components(components)

    if run_type == constants.RunTypes.beam:
        # NOTE(review): reconstructed — finish the reconstruction that was deferred
        # before the filter, then calculate the skim decisions.
        reconstruction.add_postfilter_reconstruction(path, components=components)
        add_skim_software_trigger(path, store_array_debug_prescale=1)
    elif run_type == constants.RunTypes.cosmic:
        add_skim_software_trigger(path, store_array_debug_prescale=1)
    else:
        basf2.B2FATAL(f"Run Type {run_type} not supported.")
def hlt_event_abort(module, condition, error_flag):
    """
    Create a discard path suitable for HLT processing, i.e. set an error flag and
    keep only the metadata.

    :param module: the module whose return value is tested with ``if_value``
    :param condition: condition string evaluated against the module's return value
    :param error_flag: EventErrorFlag bit set on the discarded events
    """
    # NOTE(review): reconstructed — the discard branch needs its own fresh path.
    p = basf2.Path()
    p.add_module("EventErrorFlag", errorFlag=error_flag)
    # Keep only the event metadata (and trigger results) for discarded events.
    add_store_only_metadata_path(p)
    # After the discard branch is processed, continue with the main path.
    module.if_value(condition, p, basf2.AfterConditionPath.CONTINUE)
    if error_flag == ROOT.Belle2.EventMetaData.c_HLTDiscard:
        # Count the discarded events separately in the module statistics.
        p.add_module('StatisticsSummary').set_name('Sum_HLT_Discard')
def fillParticleList(decayString, cut, writeOut=False, path=None, enforceFitHypothesis=False, loadPhotonsFromKLM=False, loadPhotonBeamBackgroundMVA=False)
def cutAndCopyList(outputListName, inputListName, cut, writeOut=False, path=None)
def reconstructDecay(decayString, cut, dmID=0, writeOut=False, path=None, candidate_limit=None, ignoreIfTooManyCandidates=True, chargeConjugation=True, allowChargeViolation=False)
def copyLists(outputListName, inputListNames, writeOut=False, path=None)
def add_postfilter_reconstruction(path, components=None, pruneTracks=False)
def add_cosmics_reconstruction(path, components=None, pruneTracks=True, skipGeometryAdding=False, eventTimingExtraction=True, addClusterExpertModules=True, merge_tracks=True, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=False, posttracking=True)
def add_prefilter_reconstruction(path, components=None, add_modules_for_trigger_calculation=True, skipGeometryAdding=False, trackFitHypotheses=None, use_second_cdc_hits=False, add_muid_hits=False, reconstruct_cdst=None, addClusterExpertModules=True, pruneTracks=True, event_abort=default_event_abort, use_random_numbers_for_hlt_prescale=True)
def stdKshorts(prioritiseV0=True, fitter='TreeFit', path=None)
def stdLambdas(prioritiseV0=True, fitter='TreeFit', path=None)
def kFit(list_name, conf_level, fit_type='vertex', constraint='', daughtersUpdate=False, decay_string='', massConstraint=[], smearing=0, path=None)