from ROOT import Belle2

from .refiners import Refiner
23 """Getter for the logger instance of this file."""
24 return logging.getLogger(__name__)


def coroutine(generator_func):
    """Famous coroutine decorator.

    Advances a receiving generator function to the first yield,
    such that it can receive a send call immediately.
    """
    @functools.wraps(generator_func)
    def start(*args, **kwargs):
        cr = generator_func(*args, **kwargs)
        next(cr)
        return cr
    return start
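

# Illustrative sketch only (the function and generator names below are hypothetical,
# not part of this module): a generator decorated with ``coroutine`` is primed
# automatically, so it can receive values via ``send`` without a prior ``next`` call.
def _example_coroutine_usage():
    @coroutine
    def collector(received):
        while True:
            received.append((yield))

    received = []
    sink = collector(received)
    sink.send(1)
    sink.send(2)
    return received  # == [1, 2]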


def harvest(foreach="", pick=None, name=None, output_file_name=None, show_results=False):
    """Decorator to turn a function into a HarvestingModule instance.

    The decorated function becomes the peel method of the module.
    The function is invoked once for each element in the foreach StoreArray and should return
    a mapping of names and values extracting the relevant variables from the object in question.

    Parameters
    ----------
    foreach : string
        Name of the StoreArray or StoreObjPtr
    pick : function(obj) -> bool
        Function that gets invoked with each object in the foreach StoreArray.
        It can return a False value, if the object should not be investigated further.
    name : string, optional
        Name used in tree and histogram names produced in this harvest.
    output_file_name : string
        Name of the ROOT output file to be produced.

    Returns
    -------
    A decorator that turns a function into a HarvestingModule instance,
    whose peel method is replaced by the decorated function.

    An example of the usage pattern can be found at the end of this file.
    """
    def harvest_decorator(peel_func):
        name_or_default = name or peel_func.__name__
        output_file_name_or_default = output_file_name or "{}.root".format(name_or_default)
        harvesting_module = HarvestingModule(foreach=foreach,
                                             output_file_name=output_file_name_or_default,
                                             name=name_or_default,
                                             show_results=show_results)
        harvesting_module.peel = peel_func
        if pick:
            harvesting_module.pick = pick
        return harvesting_module
    return harvest_decorator
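

# A minimal usage sketch of the decorator above, assuming the standard "MCParticles"
# StoreArray; the wrapper and harvester names here are hypothetical and only used
# for illustration (a fuller example lives at the end of this file).
def _example_harvest_usage():
    @harvest(foreach="MCParticles", output_file_name="MCParticleEnergies.root")
    def MCParticleEnergies(mc_particle):
        return dict(energy=mc_particle.getEnergy())

    # MCParticleEnergies is now a HarvestingModule instance and can be added to a
    # basf2 path like any other module.
    return MCParticleEnergies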
88 """Python module to generate summary figures of merits, plots and/or trees
91 It runs as a proper module in the main path and examines each object in a StoreArray
97 Methods to be overwritten
99 Method called at the start of each event, that may prepare things
100 (e.g. setup lookup tables or precomputed list) used in the following methods.
102 Method called with each object in the StoreArray.
103 Returns a False value if the object should be skipped.
106 Method called with each object in the StoreArray.
107 Extractes the parts relevant for analysis and
108 returns them as MutableMapping (e.g. a dict) of part_name and values.
109 Currently only float values or values convertable to floats are supported.
110 If requested that can change in the future.
112 On termination all the collected values are recasted to numpy arrays and
113 the whole ``crops`` of the harvest are casted to MutableMapping of numpy.array
114 with the same part_name and the same MutableMapping class as returned from peel.
116 Also in the termination phase refiners a invoked with the aggregated crops.
117 Refiners can be given in two ways.
119 First way is as a class methods marked as refiners like
122 def plot(self, crops, tdirectory, **kwds):
125 where self is the module instance, crops is the MutableMapping of numpy arrays and tdirectory
126 is the current tdirectory to which the current output shall be written.
127 The additional kwds leave room for future additional arguments.
129 Second way is to define the refiner method (like plot) out of line and add it to
130 the harvesting module instance refiners list like harvesting_module.refiners.append(plot).
132 Other specialised decorators to mark a function as a Refiner such as
140 Predefined refiner functions exist in the refiners python module as well.
143 save_tree = refiners.save_tree()
145 is a predefined method to output the MutableMapping of numpy arrays as a TTree.
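
    # Illustrative sketch of the second way described above (hypothetical variable
    # names, assuming the ``refiners`` module is imported): a refiner can also be
    # attached to an instance after construction and then runs at termination
    # alongside the refiners marked on the class.
    #
    #     harvesting_module = HarvestingModule(foreach="MCParticles",
    #                                          output_file_name="overview.root")
    #     harvesting_module.refiners.append(refiners.save_tree())
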
    default_expert_level = 1
159 """Constructor of the harvesting module.
164 Name of a StoreArray, which objects should be investigated
165 output_file_name : string
166 Name of the ROOT file to which the results should be written.
167 Giving an opened ROOT file is also allowed.
168 If None is given write to the current ROOT file.
169 name : string, optional
170 Name of the harvest that is used in the names of ROOT plots and trees.
171 Defaults to the class name.
172 title : string, optional
173 Name of the harvest that is used in the title of ROOT plots and trees.
174 Defaults to the name.
175 contact : string, optional
176 Contact email adress to be used in the validation plots contact. Defaults to None.
177 expert_level : int, optional
178 Expert level that can be used to switch on more plots.
179 Generally the higher the more detailed to analysis.
180 Meaning depends entirely on the subclass implementing a certain policy.
181 Defaults to default_expert_level.
182 show_results : bool, optional
183 Indicator to show the refined results at termination of the path
        super().__init__()

        self.foreach = foreach

        if output_file_name is not None and not isinstance(output_file_name, (str, ROOT.TFile)):
            raise TypeError("output_file_name is allowed to be a string or a ROOT.TFile object")
        self.output_file_name = output_file_name

        self.set_name(name or self.__class__.__name__)
        self.title = title or self.name()
        self.show_results = show_results
218 """Working around that name() is a method.
220 Exposing the name as a property using a different name
225 """Initialisation method of the module.
227 Prepares the receiver stash of objects to be harvestered.
233 """Event method of the module
235 * Does invoke the prepare method before the iteration starts.
236 * In each event fetch the StoreArray / iterable StoreObjPtr,
237 * Iterate through all instances
238 * Feed each instance to the pick method to deside it the instance is relevant
239 * Forward it to the peel method that should generated a dictionary of values
240 * Store each dictionary of values
243 stash = self.
stash.send
246 for crop
in self.
gather():
249 if isinstance(crop, types.GeneratorType):
251 for crop
in many_crops:
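
    # Illustrative sketch (hypothetical names): peel may also be implemented as a
    # generator that yields several crop dictionaries per picked object; the event
    # loop above stashes each yielded dictionary separately.
    #
    #     def peel(self, obj):
    #         for index, sub_object in enumerate(obj):  # assumes obj is iterable
    #             yield dict(index=index, value=float(sub_object))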
257 """Termination method of the module.
259 Finalize the collected crops.
260 Start the refinement.
268 except AttributeError:
273 """Create the storing objects for the crop values
275 Currently a numpy.array of doubles is used to store all values in memory.
277 return array.array(
"d")
281 """Coroutine that receives the dictionaries of names and values from peel and store them."""
283 raw_crops = copy.copy(crop)
284 crops = copy.copy(crop)
286 if isinstance(crop, numbers.Number):
290 raw_crops.append(crop)
293 except GeneratorExit:
294 crops = np.array(raw_crops)
296 elif isinstance(crop, collections.MutableMapping):
297 for part_name
in crop:
302 for part_name, parts
in list(raw_crops.items()):
303 if part_name
in crop:
304 parts.append(crop[part_name])
309 except GeneratorExit:
310 for part_name, parts
in list(raw_crops.items()):
311 crops[part_name] = np.array(parts)
314 msg =
"Unrecognised crop {} of type {}".format(
318 raise ValueError(msg)
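
    # Illustrative sketch of the aggregation performed above (hypothetical values):
    # if peel returns e.g. ``dict(pt=1.2, is_secondary=0.0)`` for every picked
    # object, the ``crops`` handed to the refiners at termination look like
    #
    #     {"pt": np.array([1.2, 0.7, ...]), "is_secondary": np.array([0.0, 1.0, ...])}
    #
    # i.e. the same mapping class with one numpy array per part_name.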
326 """Iterator that yield the instances form the StoreArray / iterable StoreObj.
330 Object instances from the StoreArray, iterable StoreObj or the StoreObj itself
331 in case it is not iterable.
338 foreach_is_store_obj = foreach
in registered_store_objs
339 foreach_is_store_array = foreach
in registered_store_arrays
341 if foreach
is not None:
342 if foreach_is_store_array:
344 for crop
in store_array:
347 elif foreach_is_store_obj:
354 yield store_obj.obj()
357 msg =
"Name {} does not refer to a valid object on the data store".format(
365 """Default implementation of prepare.
367 Can be overridden by subclasses.
372 """Unpack the the instances and return and dictionary of names to values or
373 a generator of those dictionaries to be saved.
378 Unpacked names and values
384 Unpacked names and values
387 return {
"name": np.nan}
390 """Indicate whether the instance should be forwarded to the peeling
394 bool : Indicator if the instance is valueable in the current harverst.
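
    # Illustrative sketch of peel and pick overrides in a subclass (hypothetical
    # subclass; the MCParticle getters are the ones used in the example at the
    # end of this file):
    #
    #     def pick(self, mc_particle):
    #         return mc_particle.getEnergy() > 0.5  # hypothetical threshold
    #
    #     def peel(self, mc_particle):
    #         return dict(energy=mc_particle.getEnergy(),
    #                     mass=mc_particle.getMass())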
399 """Receive the gathered crops and forward them to the refiners."""
408 output_tdirectory = output_tfile
411 output_tdirectory =
None
414 with root_cd(output_tdirectory)
as tdirectory:
416 refiner(self, crops, tdirectory=output_tdirectory, **kwds)
420 for name
in dir(cls):
421 if isinstance(getattr(cls, name), Refiner):
422 refiner = getattr(self, name)
424 refiner(crops, tdirectory=output_tdirectory, **kwds)
435 root_browse(output_tfile)
436 input(
"Press enter to close")
440 input(
"Press enter to close")
444 """Obtain a iterator from a StoreObj
446 Repeatly calls iter(store_obj) or store_obj.__iter__()
447 until the final iterator returns itself
451 iterator of the StoreObj
453 iterable = store_obj.obj()
455 while iterable
is not last_iterable:
456 if hasattr(iterable,
"__iter__"):
457 iterable, last_iterable = iterable.__iter__(), iterable
459 iterable, last_iterable = iter(iterable), iterable
464 """Test a quick analysis of the MCParticles in generic events."""
466 from .refiners
import save_histograms, save_tree, save_fom
468 def primaries_seen_in_detector(mc_particle):
469 return (mc_particle.hasStatus(Belle2.MCParticle.c_PrimaryParticle)
and
470 mc_particle.hasStatus(Belle2.MCParticle.c_StableInGenerator)
and
471 not mc_particle.hasStatus(Belle2.MCParticle.c_IsVirtual)
and
472 (mc_particle.hasStatus(Belle2.MCParticle.c_LeftDetector)
or
473 mc_particle.hasStatus(Belle2.MCParticle.c_StoppedInDetector)))

    @save_fom(aggregation=np.mean, select=["energy", "pt"], name="physics", key="mean_{part_name}")
    @save_histograms(outlier_z_score=5.0,
                     filter=lambda xs: xs != 0.0,
                     filter_on="is_secondary",
                     select=["pt", "is_secondary"],
                     folder_name="secondary_pt")
    @save_histograms(outlier_z_score=5.0,
                     select=["is_secondary", "pt"])
    @save_histograms(outlier_z_score=5.0,
                     select=["is_secondary", "pt"],
                     stackby="is_secondary",
                     folder_name="pt_stackby_is_secondary/nested_test")
    @save_histograms(outlier_z_score=5.0,
                     select={'pt': '$p_t$'},
                     title="Distribution of p_{t}")
    @save_tree()
    @harvest(foreach="MCParticles",
             pick=primaries_seen_in_detector,
             output_file_name="MCParticleOverview.root")
    def MCParticleOverview(mc_particle):
        momentum_tvector3 = mc_particle.getMomentum()
        pdg_code = mc_particle.getPDG()
        secondary_process = mc_particle.getSecondaryPhysicsProcess()

        return dict(
            tan_lambda=np.divide(1.0, np.tan(momentum_tvector3.Theta())),
            pt=momentum_tvector3.Pt(),
            secondary_process=secondary_process,
            is_secondary=secondary_process != 0,
            mass=mc_particle.getMass(),
            status=mc_particle.getStatus(),
            pdg_mass=ROOT.TDatabasePDG.Instance().GetParticle(pdg_code).Mass(),
            energy=mc_particle.getEnergy(),
        )

    from .run import HarvestingRun

    class ExampleHarvestingRun(HarvestingRun):
        def harvesting_module(self):
            return MCParticleOverview

    ExampleHarvestingRun().configure_and_execute_from_commandline()


if __name__ == "__main__":
    test()