Belle II Software release-09-00-03
processing.py
import os
import argparse
import multiprocessing
import tempfile

import basf2
from softwaretrigger import constants, path_utils
from geometry import check_components
from pxd import add_roi_finder, add_roi_payload_assembler, add_pxd_percentframe
from rawdata import add_unpackers
from reconstruction import add_reconstruction, add_cosmics_reconstruction


def setup_basf2_and_db(zmq=False):
    """
    Parse the command line arguments and set up the conditions database,
    parallel processing and logging for online (HLT/ExpressReco) running
    """
    parser = argparse.ArgumentParser(description='basf2 for online')

    if zmq:
        parser.add_argument("--input", required=True, type=str, help="ZMQ Address of the distributor process")
        parser.add_argument("--output", required=True, type=str, help="ZMQ Address of the collector process")
        parser.add_argument("--dqm", required=True, type=str, help="ZMQ Address of the histoserver process")
    else:
        parser.add_argument('input_buffer_name', type=str,
                            help='Input Ring Buffer name')
        parser.add_argument('output_buffer_name', type=str,
                            help='Output Ring Buffer name')
        parser.add_argument('histo_port', type=int,
                            help='Port of the HistoManager to connect to')
        parser.add_argument('--input-file', type=str,
                            help="Input sroot file, if set no RingBuffer input will be used",
                            default=None)
        parser.add_argument('--output-file', type=str,
                            help="Filename for SeqRoot output, if set no RingBuffer output will be used",
                            default=None)
        parser.add_argument('--histo-output-file', type=str,
                            help="Filename for histogram output",
                            default=None)
        parser.add_argument('--no-output',
                            help="Don't write any output files",
                            action="store_true", default=False)

    parser.add_argument('--number-processes', type=int, default=multiprocessing.cpu_count() - 5,
                        help='Number of parallel processes to use')
    parser.add_argument('--local-db-path', type=str,
                        help="set path to the local payload locations to use for the ConditionDB",
                        default=constants.DEFAULT_DB_FILE_LOCATION)
    parser.add_argument('--local-db-tag', type=str, nargs="*",
                        help="Use the local db with a specific tag (can be applied multiple times, order is relevant)")
    parser.add_argument('--central-db-tag', type=str, nargs="*",
                        help="Use the central db with a specific tag (can be applied multiple times, order is relevant)")
    parser.add_argument('--udp-hostname', type=str,
                        help="set hostname for UDP logging connection", default=None)
    parser.add_argument('--udp-port', type=int,
                        help="set port number for UDP logging connection", default=None)

    args = parser.parse_args()

    # Local DB specification
    basf2.conditions.override_globaltags()
    if args.central_db_tag:
        for central_tag in args.central_db_tag:
            basf2.conditions.prepend_globaltag(central_tag)
    else:
        if args.local_db_tag:
            for local_tag in args.local_db_tag:
                basf2.conditions.prepend_globaltag(local_tag)
        else:
            basf2.conditions.globaltags = ["online"]
            basf2.conditions.metadata_providers = ["file://" + basf2.find_file(args.local_db_path + "/metadata.sqlite")]
            basf2.conditions.payload_locations = [basf2.find_file(args.local_db_path)]

    # Number of processes
    basf2.set_nprocesses(args.number_processes)

    # basf2 logging setup
    basf2.set_log_level(basf2.LogLevel.ERROR)
    # Every log message has to be a single line, otherwise the LogFilter
    # in daq_slc throws away the remaining lines
    basf2.logging.enable_escape_newlines = True

    # UDP logging
    if (args.udp_hostname is not None) and (args.udp_port is not None):
        basf2.logging.add_udp(args.udp_hostname, args.udp_port)

    # Online realm
    basf2.set_realm("online")

    return args


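# Conditions-database precedence implemented above: any --central-db-tag values
# take precedence over --local-db-tag values, and if neither option is given the
# "online" globaltag is resolved from the SQLite metadata and payload files
# found under --local-db-path.
#
# Illustrative usage sketch (assumed steering-file pattern, not prescribed by
# this module):
#
#   args = setup_basf2_and_db(zmq=True)
#
# The returned ``args`` namespace is later handed to start_zmq_path() and
# finalize_zmq_path(), or to the ring-buffer variants start_path() and
# finalize_path() when called with zmq=False.

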
def start_path(args, location):
    """
    Create and return a path used for HLT and ExpressReco running
    """
    path = basf2.create_path()

    input_buffer_module_name = ""
    if location == constants.Location.expressreco:
        input_buffer_module_name = "Rbuf2Ds"
    elif location == constants.Location.hlt:
        input_buffer_module_name = "Raw2Ds"
    else:
        basf2.B2FATAL(f"Does not know location {location}")

    # Input
    if not args.input_file:
        path.add_module(input_buffer_module_name, RingBufferName=args.input_buffer_name)
    else:
        if args.input_file.endswith(".sroot"):
            path.add_module('SeqRootInput', inputFileName=args.input_file)
        else:
            path.add_module('RootInput', inputFileName=args.input_file)

    # Histogram Handling
    if not args.histo_output_file:
        path.add_module('DqmHistoManager', Port=args.histo_port, DumpInterval=1000, workDirName=tempfile.gettempdir() + "/")
    else:
        workdir = os.path.dirname(args.histo_output_file)
        filename = os.path.basename(args.histo_output_file)
        path.add_module('HistoManager', histoFileName=filename, workDirName=workdir)

    return path


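# Illustrative usage sketch (assumed steering-file pattern, not prescribed by
# this module) for the ring-buffer based setup, which pairs start_path() with
# finalize_path():
#
#   args = setup_basf2_and_db()
#   path = start_path(args, location=constants.Location.hlt)
#   add_hlt_processing(path, run_type=constants.RunTypes.beam)
#   finalize_path(path, args, location=constants.Location.hlt)
#   basf2.process(path)

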
def start_zmq_path(args, location):
    """
    Create and return the ZMQ-based input path together with the reconstruction
    path that is executed depending on the input module's return value
    """
    path = basf2.Path()
    reco_path = basf2.Path()

    if location == constants.Location.expressreco:
        input_module = path.add_module("HLTZMQ2Ds", input=args.input, addExpressRecoObjects=True)
    elif location == constants.Location.hlt:
        input_module = path.add_module("HLTZMQ2Ds", input=args.input)
    else:
        basf2.B2FATAL(f"Does not know location {location}")

    input_module.if_value("==0", reco_path, basf2.AfterConditionPath.CONTINUE)
    reco_path.add_module("HLTDQM2ZMQ", output=args.dqm, sendOutInterval=30)

    return path, reco_path


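# Illustrative usage sketch (assumed steering-file pattern, not prescribed by
# this module) for the ZMQ based setup: the main path drives the event loop and
# the reconstruction modules are added to the conditionally executed reco path:
#
#   args = setup_basf2_and_db(zmq=True)
#   path, reco_path = start_zmq_path(args, location=constants.Location.hlt)
#   add_hlt_processing(reco_path, run_type=constants.RunTypes.beam)
#   finalize_zmq_path(reco_path, args, location=constants.Location.hlt)
#   basf2.process(path)

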
def add_hlt_processing(path,
                       run_type=constants.RunTypes.beam,
                       softwaretrigger_mode=constants.SoftwareTriggerModes.filter,
                       prune_input=True,
                       prune_output=True,
                       unpacker_components=None,
                       reco_components=None,
                       create_hlt_unit_histograms=True,
                       switch_off_slow_modules_for_online=True,
                       **kwargs):
    """
    Add all modules for processing on HLT filter machines
    """

    # Check if the run is cosmic and set the Environment accordingly
    if run_type == constants.RunTypes.cosmic:
        basf2.declare_cosmics()

    # Check if the run is beam and set the Environment accordingly
    if run_type == constants.RunTypes.beam:
        basf2.declare_beam()

    # Always avoid the top-level 'import ROOT'.
    import ROOT  # noqa

    path.add_module('StatisticsSummary').set_name('Sum_Wait')

    if unpacker_components is None:
        unpacker_components = constants.DEFAULT_HLT_COMPONENTS
    if reco_components is None:
        reco_components = constants.DEFAULT_HLT_COMPONENTS

    check_components(unpacker_components)
    check_components(reco_components)

    # Ensure that only the DataStore content we expect in the HLT configuration
    # is present. If ROIpayloads or tracks are present in the input file, this
    # can be a problem and lead to crashes
    if prune_input:
        path.add_module("PruneDataStore", matchEntries=constants.HLT_INPUT_OBJECTS)

    # Add the geometry (if not already present)
    path_utils.add_geometry_if_not_present(path)
    path.add_module('StatisticsSummary').set_name('Sum_Initialization')

    # Unpack the event content
    add_unpackers(path, components=unpacker_components, writeKLMDigitRaws=True)
    path.add_module('StatisticsSummary').set_name('Sum_Unpackers')

    # Build one path for all accepted events...
    accept_path = basf2.Path()

    # Do the reconstruction needed for the HLT decision
    path_utils.add_pre_filter_reconstruction(
        path,
        run_type=run_type,
        components=reco_components,
        switch_off_slow_modules_for_online=switch_off_slow_modules_for_online,
        **kwargs
    )

    # Perform the HLT filter calculation
    path_utils.add_filter_software_trigger(path, store_array_debug_prescale=1)

    # Add the part of the DQM modules which should run after every reconstruction
    path_utils.add_hlt_dqm(path, run_type=run_type, components=reco_components, dqm_mode=constants.DQMModes.before_filter,
                           create_hlt_unit_histograms=create_hlt_unit_histograms)

    # Only turn on the filtering (by branching the path) if requested
    if softwaretrigger_mode == constants.SoftwareTriggerModes.filter:
        # Now split up the path according to the HLT decision
        hlt_filter_module = path_utils.add_filter_module(path)

        # There are two possibilities for the output of this module:
        # (1) the event is dismissed -> only store the metadata
        path_utils.hlt_event_abort(hlt_filter_module, "==0", ROOT.Belle2.EventMetaData.c_HLTDiscard)
        # (2) the event is accepted -> go on with the HLT reconstruction
        hlt_filter_module.if_value("==1", accept_path, basf2.AfterConditionPath.CONTINUE)
    elif softwaretrigger_mode == constants.SoftwareTriggerModes.monitor:
        # Otherwise always go with the accept path
        path.add_path(accept_path)
    else:
        basf2.B2FATAL(f"The software trigger mode {softwaretrigger_mode} is not supported.")

    # For accepted events we continue the reconstruction
    path_utils.add_post_filter_reconstruction(
        accept_path,
        run_type=run_type,
        components=reco_components,
        switch_off_slow_modules_for_online=switch_off_slow_modules_for_online
    )

    # Only create the ROIs for accepted events
    add_roi_finder(accept_path)
    accept_path.add_module('StatisticsSummary').set_name('Sum_ROI_Finder')

    # Add the HLT DQM modules only in case the event is accepted
    path_utils.add_hlt_dqm(
        accept_path,
        run_type=run_type,
        components=reco_components,
        dqm_mode=constants.DQMModes.filtered,
        create_hlt_unit_histograms=create_hlt_unit_histograms)

    # Make sure to create ROI payloads for sending them to ONSEN.
    # Do this for all events.
    # Normally, the payload assembler marks the event with the software trigger
    # decision to inform the hardware to drop the data for the event in case the
    # decision is "no". However, if we are running in monitoring mode, we ignore
    # the decision.
    pxd_ignores_hlt_decision = (softwaretrigger_mode == constants.SoftwareTriggerModes.monitor)
    add_roi_payload_assembler(path, ignore_hlt_decision=pxd_ignores_hlt_decision)
    path.add_module('StatisticsSummary').set_name('Sum_ROI_Payload_Assembler')

    # Add the part of the DQM modules which should run on all events, not only on the accepted ones
    path_utils.add_hlt_dqm(path, run_type=run_type, components=reco_components, dqm_mode=constants.DQMModes.all_events,
                           create_hlt_unit_histograms=create_hlt_unit_histograms)

    if prune_output:
        # And in the end remove everything which should not be stored
        path_utils.add_store_only_rawdata_path(path)
    path.add_module('StatisticsSummary').set_name('Sum_Close_Event')


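# Summary of the path layout built by add_hlt_processing (StatisticsSummary
# markers omitted). In filter mode the accept path runs only for events passing
# the software trigger; in monitor mode it runs for every event:
#
#   main path:   PruneDataStore -> geometry -> unpackers
#                -> pre-filter reconstruction -> SoftwareTrigger
#                -> before_filter DQM -> filter branch (or plain append in monitor mode)
#   accept path: post-filter reconstruction -> ROI finder -> filtered DQM
#   main path:   ROI payload assembler -> all_events DQM -> output pruning

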
def add_hlt_passthrough(path):
    """
    Add all modules for operating HLT machines in passthrough mode.
    """
    add_pxd_percentframe(path, fraction=0.1, random_position=True)
    add_roi_payload_assembler(path, ignore_hlt_decision=True)


def add_expressreco_processing(path,
                               run_type=constants.RunTypes.beam,
                               select_only_accepted_events=False,
                               prune_input=True,
                               prune_output=True,
                               unpacker_components=None,
                               reco_components=None,
                               do_reconstruction=True,
                               switch_off_slow_modules_for_online=True,
                               **kwargs):
    """
    Add all modules for processing on the ExpressReco machines
    """

    # Check if the run is cosmic and set the Environment accordingly
    if run_type == constants.RunTypes.cosmic:
        basf2.declare_cosmics()

    # Check if the run is beam and set the Environment accordingly
    if run_type == constants.RunTypes.beam:
        basf2.declare_beam()

    if unpacker_components is None:
        unpacker_components = constants.DEFAULT_EXPRESSRECO_COMPONENTS
    if reco_components is None:
        reco_components = constants.DEFAULT_EXPRESSRECO_COMPONENTS

    check_components(unpacker_components)
    check_components(reco_components)

    # If turned on, only events selected by the HLT are processed on ExpressReco.
    # This is needed because by default also unselected events are passed to
    # ExpressReco, however they are empty.
    if select_only_accepted_events:
        skim_module = path.add_module("TriggerSkim", triggerLines=["software_trigger_cut&all&total_result"], resultOnMissing=0)
        skim_module.if_value("==0", basf2.Path(), basf2.AfterConditionPath.END)

    # Ensure that only the DataStore content we expect in the ExpressReco
    # configuration is present. If tracks are present in the input file, this
    # can be a problem and lead to crashes
    if prune_input:
        path.add_module("PruneDataStore", matchEntries=constants.EXPRESSRECO_INPUT_OBJECTS)

    path_utils.add_geometry_if_not_present(path)
    add_unpackers(path, components=unpacker_components, writeKLMDigitRaws=True)

    # Don't filter/prune PXD for partly broken events, as we lose diagnostics in DQM
    basf2.set_module_parameters(path, "PXDPostErrorChecker", CriticalErrorMask=0)

    if do_reconstruction:
        if run_type == constants.RunTypes.beam:
            add_reconstruction(path,
                               components=reco_components,
                               pruneTracks=False,
                               skipGeometryAdding=True,
                               add_trigger_calculation=False,
                               switch_off_slow_modules_for_online=switch_off_slow_modules_for_online,
                               **kwargs)
        elif run_type == constants.RunTypes.cosmic:
            add_cosmics_reconstruction(path, components=reco_components, pruneTracks=False,
                                       skipGeometryAdding=True, **kwargs)
        else:
            basf2.B2FATAL(f"Run Type {run_type} not supported.")

        basf2.set_module_parameters(path, "SVDTimeGrouping", forceGroupingFromDB=False,
                                    isEnabledIn6Samples=True, isEnabledIn3Samples=True)

    path_utils.add_expressreco_dqm(path, run_type, components=reco_components)

    if prune_output:
        path.add_module("PruneDataStore", matchEntries=constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS +
                        constants.PROCESSED_OBJECTS)


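# Illustrative usage sketch (assumed steering-file pattern, not prescribed by
# this module) for an ExpressReco worker in the ZMQ based setup:
#
#   args = setup_basf2_and_db(zmq=True)
#   path, reco_path = start_zmq_path(args, location=constants.Location.expressreco)
#   add_expressreco_processing(reco_path, run_type=constants.RunTypes.beam)
#   finalize_zmq_path(reco_path, args, location=constants.Location.expressreco)
#   basf2.process(path)

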
def finalize_path(path, args, location, show_progress_bar=True):
    """
    Add the required output modules for expressreco/HLT
    """
    save_objects = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if location == constants.Location.expressreco:
        save_objects += constants.PROCESSED_OBJECTS

    if show_progress_bar:
        path.add_module("Progress")

    # Limit streaming objects for parallel processing
    basf2.set_streamobjs(save_objects)

    if args.no_output:
        return

    output_buffer_module_name = ""
    if location == constants.Location.expressreco:
        output_buffer_module_name = "Ds2Sample"
    elif location == constants.Location.hlt:
        output_buffer_module_name = "Ds2Rbuf"
    else:
        basf2.B2FATAL(f"Does not know location {location}")

    if not args.output_file:
        path.add_module(output_buffer_module_name, RingBufferName=args.output_buffer_name,
                        saveObjs=save_objects)
    else:
        if args.output_file.endswith(".sroot"):
            path.add_module("SeqRootOutput", saveObjs=save_objects, outputFileName=args.output_file)
        else:
            # We are storing everything on purpose!
            path.add_module("RootOutput", outputFileName=args.output_file)


def finalize_zmq_path(path, args, location):
    """
    Add the required output modules for expressreco/HLT
    """
    save_objects = constants.ALWAYS_SAVE_OBJECTS + constants.RAWDATA_OBJECTS
    if location == constants.Location.expressreco:
        save_objects += constants.PROCESSED_OBJECTS

    # Limit streaming objects for parallel processing
    basf2.set_streamobjs(save_objects)

    if location == constants.Location.expressreco:
        path.add_module("HLTDs2ZMQ", output=args.output, raw=False, outputConfirmation=False)
    elif location == constants.Location.hlt:
        path.add_module("HLTDs2ZMQ", output=args.output, raw=True, outputConfirmation=True)
    else:
        basf2.B2FATAL(f"Does not know location {location}")