monitoring.py
#!/usr/bin/env python

"""
Contains classes to read in the monitoring output
and some simple plotting routines.

This is used by printReporting.py and latexReporting.py
to create summaries for a FEI training or application.
"""

try:
    from generators import get_default_decayfile
except ModuleNotFoundError:
    print("MonitoringBranchingFractions won't work.")
from basf2_mva_evaluation import plotting
import basf2_mva_util
import pickle
import copy
import math
import os
import numpy as np
import pdg
import fei


def removeJPsiSlash(string):
    """ Remove slashes in a string, which are not allowed in filenames. """
    return string.replace('/', '')


def load_config():
    """ Load the FEI configuration from the Summary.pickle file. """
    if not os.path.isfile('Summary.pickle'):
        raise RuntimeError("""Could not find Summary.pickle!
 This file is automatically created by the FEI training.
 But you can also create it yourself using:
 pickle.dump((particles, configuration), open('Summary.pickle', 'wb'))""")
    return pickle.load(open('Summary.pickle', 'rb'))
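
# Illustrative usage sketch (not part of the original module): how a reporting
# script typically starts. Assumes a Summary.pickle written by a FEI training
# exists in the current working directory; the `_example_` name is hypothetical.
def _example_load_config():
    particles, configuration = load_config()
    print(f"FEI configuration loaded for {len(particles)} particles")
    for particle in particles:
        print(particle.identifier)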


class Statistic:
    """
    This class provides the efficiency, purity and other quantities for a
    given number of true signal candidates, signal candidates and background candidates
    """

    def __init__(self, nTrueSig, nSig, nBg):
        """
        Create a new Statistic object
        @param nTrueSig the number of true signal particles
        @param nSig the number of reconstructed signal candidates
        @param nBg the number of reconstructed background candidates
        """
        #: the number of true signal particles
        self._nTrueSig = nTrueSig
        #: the number of reconstructed signal candidates
        self._nSig = nSig
        #: the number of reconstructed background candidates
        self._nBg = nBg

    @property
    def nSig(self):
        """ Returns the number of reconstructed signal candidates. """
        return self._nSig

    @property
    def nTrueSig(self):
        """ Returns the number of true signal particles. """
        return self._nTrueSig

    @property
    def nBg(self):
        """ Returns the number of reconstructed background candidates. """
        return self._nBg

    @property
    def nTotal(self):
        """ Returns the total number of reconstructed candidates. """
        return self._nSig + self._nBg

    @property
    def purity(self):
        """ Returns the purity of the reconstructed candidates. """
        if self._nSig == 0:
            return 0.0
        # \cond false positive doxygen warning
        if self.nTotal == 0:
            return 0.0
        return self._nSig / float(self.nTotal)
        # \endcond

    @property
    def efficiency(self):
        """ Returns the efficiency of the reconstructed signal candidates with respect to the number of true signal particles. """
        if self._nSig == 0:
            return 0.0
        if self._nTrueSig == 0:
            return float('inf')
        return self._nSig / float(self._nTrueSig)

    @property
    def purityError(self):
        """ Returns the uncertainty of the purity. """
        # \cond false positive doxygen warning
        if self.nTotal == 0:
            return 0.0
        return self.calcStandardDeviation(self._nSig, self.nTotal)
        # \endcond

    @property
    def efficiencyError(self):
        """
        Returns the uncertainty of the efficiency.
        For an efficiency eps = self._nSig/self._nTrueSig, this function calculates the
        standard deviation according to http://arxiv.org/abs/physics/0701199 .
        """
        if self._nTrueSig == 0:
            return float('inf')
        return self.calcStandardDeviation(self._nSig, self._nTrueSig)

    def calcStandardDeviation(self, k, n):
        """ Helper method to calculate the standard deviation for efficiencies. """
        k = float(k)
        n = float(n)
        variance = (k + 1) * (k + 2) / ((n + 2) * (n + 3)) - (k + 1) ** 2 / ((n + 2) ** 2)
        if variance <= 0:
            return 0.0
        return math.sqrt(variance)

    def __str__(self):
        """ Returns a string representation of a Statistic object. """
        o = f"nTrueSig {self.nTrueSig} nSig {self.nSig} nBg {self.nBg}\n"
        o += f"Efficiency {self.efficiency:.3f} ({self.efficiencyError:.3f})\n"
        o += f"Purity {self.purity:.3f} ({self.purityError:.3f})\n"
        return o

    def __add__(self, a):
        """ Adds two Statistic objects and returns a new object. """
        # \cond false positive doxygen warning
        return Statistic(self.nTrueSig, self.nSig + a.nSig, self.nBg + a.nBg)
        # \endcond

    def __radd__(self, a):
        """
        Returns a new Statistic object if the current one is added to zero.
        Necessary to apply the sum function to Statistic objects.
        """
        if a != 0:
            return NotImplemented
        # \cond false positive doxygen warning
        return Statistic(self.nTrueSig, self.nSig, self.nBg)
        # \endcond
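
# Illustrative sketch (not part of the original module; numbers invented):
# Statistic objects can be combined with the built-in sum() thanks to __radd__
# above. Note that __add__ keeps the nTrueSig of the left operand, so this is
# only meaningful for statistics referring to the same set of true particles.
def _example_sum_statistics():
    per_channel = [Statistic(nTrueSig=1000, nSig=200, nBg=50),
                   Statistic(nTrueSig=1000, nSig=300, nBg=100)]
    total = sum(per_channel)
    print(total)  # efficiency 500/1000 = 0.500, purity 500/650 = 0.769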


class MonitoringHist:
    """
    Reads all TH1F and TH2F from a ROOT file
    and puts them into a more accessible format.
    """

    def __init__(self, filename, dirname):
        """
        Reads histograms from the given file
        @param filename the name of the ROOT file
        @param dirname the name of the directory inside the ROOT file
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        #: Dictionary of bin-contents for each histogram
        self.values = {}
        #: Dictionary of bin-centers for each histogram
        self.centers = {}
        #: Dictionary of 2D mode for each histogram
        self.two_dimensional = {}
        #: Dictionary of number of bins for each histogram
        self.nbins = {}
        #: Indicates if the histograms were successfully read
        self.valid = os.path.isfile(filename)

        if not self.valid:
            return

        f = ROOT.TFile.Open(filename, 'read')
        d = f.Get(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(dirname))

        for key in d.GetListOfKeys():
            name = ROOT.Belle2.MakeROOTCompatible.invertMakeROOTCompatible(key.GetName())
            hist = key.ReadObj()
            if not (isinstance(hist, ROOT.TH1D) or isinstance(hist, ROOT.TH1F) or
                    isinstance(hist, ROOT.TH2D) or isinstance(hist, ROOT.TH2F)):
                continue
            self.two_dimensional[name] = isinstance(hist, ROOT.TH2D) or isinstance(hist, ROOT.TH2F)
            if self.two_dimensional[name]:
                # ranges run up to nbins + 1 to include the under- and overflow bins
                nbins = (hist.GetNbinsX(), hist.GetNbinsY())
                self.centers[name] = [[hist.GetXaxis().GetBinCenter(i) for i in range(nbins[0] + 2)],
                                      [hist.GetYaxis().GetBinCenter(i) for i in range(nbins[1] + 2)]]
                self.values[name] = [[hist.GetBinContent(i, j) for i in range(nbins[0] + 2)] for j in range(nbins[1] + 2)]
                self.nbins[name] = nbins
            else:
                nbins = hist.GetNbinsX()
                self.centers[name] = np.array([hist.GetBinCenter(i) for i in range(nbins + 2)])
                self.values[name] = np.array([hist.GetBinContent(i) for i in range(nbins + 2)])
                self.nbins[name] = nbins

    def sum(self, name):
        """
        Calculates the sum of a given histogram (== sum of all entries)
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            return sum(sum(row) for row in self.values[name])
        return np.sum(self.values[name])

    def mean(self, name):
        """
        Calculates the mean of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            # values[name][i][j] holds the content of y-bin i and x-bin j;
            # weight each bin with its x-axis center (centers[name][0])
            tempsum = 0
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    tempsum += self.centers[name][0][j] * self.values[name][i][j]
            return tempsum / self.sum(name)
        return np.average(self.centers[name], weights=self.values[name])

    def std(self, name):
        """
        Calculates the standard deviation of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            avg = self.mean(name)
            tempsum = 0
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    tempsum += self.values[name][i][j] * (self.centers[name][0][j] - avg)**2
            return np.sqrt(tempsum / self.sum(name))
        avg = np.average(self.centers[name], weights=self.values[name])
        return np.sqrt(np.average((self.centers[name] - avg)**2, weights=self.values[name]))

    def min(self, name):
        """
        Calculates the minimum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            # return the x-axis center of the bin with the smallest content
            tempmin = np.inf
            result = np.nan
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    if self.values[name][i][j] < tempmin:
                        tempmin = self.values[name][i][j]
                        result = self.centers[name][0][j]
            return result
        nonzero = np.nonzero(self.values[name])[0]
        if len(nonzero) == 0:
            return np.nan
        return self.centers[name][nonzero[0]]

    def max(self, name):
        """
        Calculates the maximum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            # return the x-axis center of the bin with the largest content
            tempmax = -np.inf
            result = np.nan
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    if self.values[name][i][j] > tempmax:
                        tempmax = self.values[name][i][j]
                        result = self.centers[name][0][j]
            return result
        nonzero = np.nonzero(self.values[name])[0]
        if len(nonzero) == 0:
            return np.nan
        return self.centers[name][nonzero[-1]]
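
# Illustrative sketch (not part of the original module): summarise one variable
# from a monitoring histogram file. The file name matches what the FEI monitoring
# writes; the channel label and the variable 'M' are hypothetical placeholders.
def _example_monitoring_hist():
    hist = MonitoringHist('Monitor_PreReconstruction_BeforeRanking.root', 'D0:generic_0')
    if hist.valid and 'M' in hist.centers:
        print(f"entries {hist.sum('M'):.0f} mean {hist.mean('M'):.3f} std {hist.std('M'):.3f}")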


class MonitoringNTuple:
    """
    Reads the ntuple named variables from a ROOT file
    """

    def __init__(self, filename, treenameprefix):
        """
        Reads the ntuple from the given file
        @param filename the name of the ROOT file
        @param treenameprefix the prefix of the tree name
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        #: Indicates if the ntuple was successfully read
        self.valid = os.path.isfile(filename)
        print(f'FEI-monitoring: Looking for {filename}')
        if not self.valid:
            raise RuntimeError(f"Could not find {filename}: current dir is {os.getcwd()}")

        #: Reference to the ROOT file, so it isn't closed
        self.f = ROOT.TFile.Open(filename, 'read')
        print(f'FEI-monitoring: Found {filename}')
        #: Reference to the tree named 'variables' inside the ROOT file
        self.tree = self.f.Get(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(f'{treenameprefix} variables'))
        print(f'FEI-monitoring: Found {treenameprefix} variables')
        #: Filename so we can use it later
        self.filename = filename
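
# Illustrative sketch (not part of the original module): open the final FEI
# ntuple and count the stored candidates. 'B+:generic' is a hypothetical
# particle-list name; Monitor_Final.root is the file written by the FEI monitoring.
def _example_final_ntuple():
    ntuple = MonitoringNTuple('Monitor_Final.root', 'B+:generic')
    print(f"{ntuple.filename}: {ntuple.tree.GetEntries()} candidates")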
327 """
328 Reads the module statistics for a single particle from the outputted root file
329 and puts them into a more accessible format
330 """
331
332 def __init__(self, particle):
333 """
334 Reads the module statistics from the file named Monitor_ModuleStatistics.root
335 @param particle the particle for which the statistics are read
336 """
337 # Always avoid the top-level 'import ROOT'.
338 import ROOT # noqa
339 root_file = ROOT.TFile.Open('Monitor_ModuleStatistics.root', 'read')
340 persistentTree = root_file.Get('persistent')
341 persistentTree.GetEntry(0)
342 # Clone() needed so we actually own the object (original dies when tfile is deleted)
343 stats = persistentTree.ProcessStatistics.Clone()
344
345 # merge statistics from all persistent trees into 'stats'
346 numEntries = persistentTree.GetEntriesFast()
347 for i in range(1, numEntries):
348 persistentTree.GetEntry(i)
349 stats.merge(persistentTree.ProcessStatistics)
350
351 # TODO .getTimeSum returns always 0 at the moment ?!
352 statistic = {m.getName(): m.getTimeSum(m.c_Event) / 1e9 for m in stats.getAll()}
353

        #: the time for each channel
        self.channel_time = {}
        #: the time per module for each channel
        self.channel_time_per_module = {}
        for channel in particle.channels:
            if channel.label not in self.channel_time:
                self.channel_time[channel.label] = 0.0
                self.channel_time_per_module[channel.label] = {'ParticleCombiner': 0.0,
                                                               'BestCandidateSelection': 0.0,
                                                               'PListCutAndCopy': 0.0,
                                                               'VariablesToExtraInfo': 0.0,
                                                               'MCMatch': 0.0,
                                                               'ParticleSelector': 0.0,
                                                               'MVAExpert': 0.0,
                                                               'ParticleVertexFitter': 0.0,
                                                               'TagUniqueSignal': 0.0,
                                                               'VariablesToHistogram': 0.0,
                                                               'VariablesToNtuple': 0.0}
            for key, time in statistic.items():
                if (channel.decayString in key or channel.name in key):
                    self.channel_time[channel.label] += time
                    for k in self.channel_time_per_module[channel.label]:
                        if k in key:
                            self.channel_time_per_module[channel.label][k] += time

        #: the time per particle
        self.particle_time = 0
        for key, time in statistic.items():
            if particle.identifier in key:
                self.particle_time += time
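
# Illustrative sketch (not part of the original module): rank the channels of a
# particle by the time spent reconstructing them. `particle` is a FEI Particle
# object as returned by load_config(); Monitor_ModuleStatistics.root must exist.
def _example_channel_timing(particle):
    stats = MonitoringModuleStatistics(particle)
    for label, seconds in sorted(stats.channel_time.items(), key=lambda x: -x[1]):
        print(f"{label}: {seconds:.1f} s")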

def MonitorSigProbPlot(particle, filename):
    """ Creates a Signal probability plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))

    p = plotting.VerboseDistribution(range_in_std=5.0)
    common = (df['probability'] >= 0) & (df['probability'] <= 1)
    df = df[common]
    p.add(df, 'probability', (df['signal'] == 1), label="Signal")
    p.add(df, 'probability', (df['signal'] == 0), label="Background")
    p.finish()
    p.axis.set_title("Signal probability")
    p.axis.set_xlabel("Probability")
    p.save(filename + '.png')

def MonitorSpectatorPlot(particle, spectator, filename, range=(None, None)):
    """ Creates a spectator plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc', spectator,
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', spectator, 'probability', 'signal'], max_entries=int(1e8))
    for i, cut in enumerate([0.0, 0.01, 0.05, 0.1, 0.2, 0.5]):
        p = plotting.VerboseDistribution(range_in_std=5.0)
        common = (df['probability'] >= cut)
        if range[0] is not None:
            common &= (df[spectator] >= range[0])
        if range[1] is not None:
            common &= (df[spectator] <= range[1])
        df = df[common]
        p.add(df, spectator, (df['signal'] == 1), label="Signal")
        p.add(df, spectator, (df['signal'] == 0), label="Background")
        p.finish()
        p.axis.set_title(f"{spectator} for signal probability >= {cut:.2f}")
        p.axis.set_xlabel(spectator)
        p.save(f'{filename}_{i}.png')

def MonitorROCPlot(particle, filename):
    """ Creates a ROC plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))
    p = plotting.RejectionOverEfficiency()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0, label='All')
    p.finish()
    p.save(filename + '.png')

def MonitorDiagPlot(particle, filename):
    """ Creates a Diagonal plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))
    p = plotting.Diagonal()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0)
    p.finish()
    p.save(filename + '.png')
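
# Illustrative sketch (not part of the original module): produce the standard set
# of monitoring plots for one particle. `monitoring_particle` is a
# MonitoringParticle (defined below); the output file names are hypothetical.
def _example_plots(monitoring_particle):
    name = removeJPsiSlash(monitoring_particle.particle.identifier)
    MonitorSigProbPlot(monitoring_particle, f'{name}_sigprob')
    MonitorROCPlot(monitoring_particle, f'{name}_roc')
    MonitorDiagPlot(monitoring_particle, f'{name}_diag')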

def MonitoringMCCount(particle):
    """
    Reads the MC counts for a given particle from the ROOT file mcParticlesCount.root
    @param particle the particle for which the MC counts are read
    @return dictionary with 'sum', 'std', 'avg', 'max', and 'min'
    """
    # Always avoid the top-level 'import ROOT'.
    import ROOT  # noqa
    root_file = ROOT.TFile.Open('mcParticlesCount.root', 'read')

    key = f'NumberOfMCParticlesInEvent({abs(pdg.from_name(particle.name))})'
    key = ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(key)
    hist = root_file.Get(key)

    mc_counts = {'sum': 0, 'std': 0, 'avg': 0, 'min': 0, 'max': 0}
    if hist:
        mc_counts['sum'] = sum(hist.GetXaxis().GetBinCenter(bin + 1) * hist.GetBinContent(bin + 1)
                               for bin in range(hist.GetNbinsX()))
        mc_counts['std'] = hist.GetStdDev()
        mc_counts['avg'] = hist.GetMean()
        mc_counts['max'] = hist.GetXaxis().GetBinCenter(hist.FindLastBinAbove(0.0))
        mc_counts['min'] = hist.GetXaxis().GetBinCenter(hist.FindFirstBinAbove(0.0))
    return mc_counts
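
# Illustrative sketch (not part of the original module): look up how many true D0
# were present in the monitored events. Only the .name attribute is used, so a
# simple stand-in object suffices; mcParticlesCount.root must exist.
def _example_mc_count():
    import types
    counts = MonitoringMCCount(types.SimpleNamespace(name='D0'))
    print(f"true D0 in sample: {counts['sum']:.0f} (avg {counts['avg']:.2f} per event)")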


class MonitoringBranchingFractions:
    """ Class extracts the branching fractions of a decay channel from the DECAY.DEC file. """

    #: shared cache of the extracted branching fractions
    _shared = None

    def __init__(self):
        """
        Create a new MonitoringBranchingFraction object.
        The extracted branching fractions are cached, hence creating more than one object does not do anything.
        """
        if MonitoringBranchingFractions._shared is None:
            decay_file = get_default_decayfile()
            #: exclusive branching fractions
            self.exclusive_branching_fractions = self.loadExclusiveBranchingFractions(decay_file)
            #: inclusive branching fractions
            self.inclusive_branching_fractions = self.loadInclusiveBranchingFractions(self.exclusive_branching_fractions)
            MonitoringBranchingFractions._shared = (self.exclusive_branching_fractions, self.inclusive_branching_fractions)
        else:
            self.exclusive_branching_fractions, self.inclusive_branching_fractions = MonitoringBranchingFractions._shared

    def getExclusive(self, particle):
        """ Returns the exclusive (i.e. without the branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.exclusive_branching_fractions)

    def getInclusive(self, particle):
        """ Returns the inclusive (i.e. including all branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.inclusive_branching_fractions)

    def getBranchingFraction(self, particle, branching_fractions):
        """ Returns the branching fraction of a particle given a branching_fraction table. """
        result = {c.label: 0.0 for c in particle.channels}
        name = particle.name
        channels = [tuple(sorted(d.split(':')[0] for d in channel.daughters)) for channel in particle.channels]
        if name not in branching_fractions:
            name = pdg.conjugate(name)
            # re-sort after conjugation, since the table keys are sorted tuples
            channels = [tuple(sorted(pdg.conjugate(d) for d in channel)) for channel in channels]
            if name not in branching_fractions:
                return result
        for c, key in zip(particle.channels, channels):
            if key in branching_fractions[name]:
                result[c.label] = branching_fractions[name][key]
        return result
528 """
529 Load branching fraction from MC decay-file.
530 """
531
532 def isFloat(element):
533 """ Checks if element is a convertible to float"""
534 try:
535 float(element)
536 return True
537 except ValueError:
538 return False
539
540 def isValidParticle(element):
541 """ Checks if element is a valid pdg name for a particle"""
542 try:
543 pdg.from_name(element)
544 return True
545 except LookupError:
546 return False
547
548 branching_fractions = {'UNKOWN': {}}
549
550 mother = 'UNKOWN'
551 with open(filename) as f:
552 for line in f:
553 fields = line.split(' ')
554 fields = [x for x in fields if x != '']
555 if len(fields) < 2 or fields[0][0] == '#':
556 continue
557 if fields[0] == 'Decay':
558 mother = fields[1].strip()
559 if not isValidParticle(mother):
560 mother = 'UNKOWN'
561 continue
562 if fields[0] == 'Enddecay':
563 mother = 'UNKOWN'
564 continue
565 if mother == 'UNKOWN':
566 continue
567 fields = fields[:-1]
568 if len(fields) < 1 or not isFloat(fields[0]):
569 continue
570 while len(fields) > 1:
571 if isValidParticle(fields[-1]):
572 break
573 fields = fields[:-1]
574 if len(fields) < 1 or not all(isValidParticle(p) for p in fields[1:]):
575 continue
576 neutrinoTag_list = ['nu_e', 'nu_mu', 'nu_tau', 'anti-nu_e', 'anti-nu_mu', 'anti-nu_tau']
577 daughters = tuple(sorted(p for p in fields[1:] if p not in neutrinoTag_list))
578 if mother not in branching_fractions:
579 branching_fractions[mother] = {}
580 if daughters not in branching_fractions[mother]:
581 branching_fractions[mother][daughters] = 0.0
582 branching_fractions[mother][daughters] += float(fields[0])
583
584 del branching_fractions['UNKOWN']
585 return branching_fractions
586

    def loadInclusiveBranchingFractions(self, exclusive_branching_fractions):
        """
        Get the covered branching fraction of each particle using a recursive algorithm
        and the given exclusive branching fractions
        @param exclusive_branching_fractions the exclusive branching-fraction table
        """
        particles = set(exclusive_branching_fractions.keys())
        particles.update({pdg.conjugate(p) for p in particles if p != pdg.conjugate(p)})
        # process particles in ascending mass order, so daughters are corrected before their mothers
        particles = sorted(particles, key=lambda x: pdg.get(x).Mass())
        inclusive_branching_fractions = copy.deepcopy(exclusive_branching_fractions)

        for p in particles:
            if p in inclusive_branching_fractions:
                br = sum(inclusive_branching_fractions[p].values())
            else:
                br = sum(inclusive_branching_fractions[pdg.conjugate(p)].values())
            for p_br in inclusive_branching_fractions.values():
                for c in p_br:
                    for _ in range(c.count(p)):
                        p_br[c] *= br
        return inclusive_branching_fractions
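
# Illustrative sketch (not part of the original module; numbers invented): the
# tables built above map a mother-particle name to {sorted tuple of daughter
# names: branching fraction}. If the exclusive table covers 40% of all D0
# decays, every channel containing an (anti-)D0 is scaled by that coverage,
# here 0.005 * 0.4 = 0.002.
def _example_inclusive_scaling():
    bfs = MonitoringBranchingFractions()
    exclusive = {'D0': {('K-', 'pi+'): 0.4},
                 'B+': {('anti-D0', 'pi+'): 0.005}}
    inclusive = bfs.loadInclusiveBranchingFractions(exclusive)
    print(inclusive['B+'][('anti-D0', 'pi+')])  # 0.002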


class MonitoringParticle:
    """
    Monitoring object containing all the monitoring information
    about a single particle
    """

    def __init__(self, particle):
        """
        Read the monitoring information of the given particle
        @param particle the particle for which the information is read
        """
        #: Particle containing its configuration
        self.particle = particle
        particlesInStages = fei.core.get_stages_from_particles([particle])
        stage = 0
        for i in range(len(particlesInStages)):
            for iparticle in particlesInStages[i]:
                if iparticle.identifier == self.particle.identifier:
                    stage = i + 1
                    break
        if stage == 0:
            raise RuntimeError(f"Could not find particle {self.particle.identifier} in the list of stages.")

        #: Dictionary with 'sum', 'std', 'avg', 'min' and 'max' of the MC counts
        self.mc_count = MonitoringMCCount(particle)
        #: Module statistics
        self.module_statistic = MonitoringModuleStatistics(particle)
        #: time per channel
        self.time_per_channel = self.module_statistic.channel_time
        #: time per channel per module
        self.time_per_channel_per_module = self.module_statistic.channel_time_per_module
        #: total time
        self.total_time = self.module_statistic.particle_time + sum(self.time_per_channel.values())

        #: Total number of channels
        self.total_number_of_channels = len(self.particle.channels)
        #: Reconstructed number of channels
        self.reconstructed_number_of_channels = 0

        #: Branching fractions
        self.branching_fractions = MonitoringBranchingFractions()
        #: Exclusive branching fractions per channel
        self.exc_br_per_channel = self.branching_fractions.getExclusive(particle)
        #: Inclusive branching fractions per channel
        self.inc_br_per_channel = self.branching_fractions.getInclusive(particle)

        #: Monitoring histogram in PreReconstruction before the ranking-cut
        self.before_ranking = {}
        #: Monitoring histogram in PreReconstruction after the ranking-cut
        self.after_ranking = {}
        #: Monitoring histogram in PreReconstruction after the vertex fit
        self.after_vertex = {}
        #: Monitoring histogram in PostReconstruction after the mva application
        self.after_classifier = {}
        #: Monitoring histogram for TrainingData generation, only available if monitoring runs on the training data
        self.training_data = {}
        #: Dictionary containing whether the channel reconstructed at least one candidate or not
        self.ignored_channels = {}

        for channel in self.particle.channels:
            hist = MonitoringHist('Monitor_PreReconstruction_BeforeRanking.root', f'{channel.label}')
            self.before_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterRanking.root', f'{channel.label}')
            self.after_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterVertex.root', f'{channel.label}')
            self.after_vertex[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PostReconstruction_AfterMVA.root', f'{channel.label}')
            self.after_classifier[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            if hist.valid and hist.sum(channel.mvaConfig.target) > 0:
                self.reconstructed_number_of_channels += 1
                self.ignored_channels[channel.label] = False
            else:
                self.ignored_channels[channel.label] = True
            hist = MonitoringHist('Monitor_TrainingData.root', f'{channel.label}')
            self.training_data[channel.label] = hist

        plist = removeJPsiSlash(particle.identifier)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforePostCut.root', f'{plist}')
        #: Statistic object in PostReconstruction before the postcut
        self.before_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforeRanking.root', f'{plist}')
        #: Statistic object in PostReconstruction before the ranking postcut
        self.before_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_AfterRanking.root', f'{plist}')
        #: Statistic object in PostReconstruction after the ranking postcut
        self.after_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        #: Statistic object before unique tagging of signals
        self.before_tag = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        #: Reference to the final ntuple
        self.final_ntuple = MonitoringNTuple('Monitor_Final.root', f'{plist}')
        #: Statistic object after unique tagging of signals
        self.after_tag = self.calculateUniqueStatistic(self.final_ntuple.tree)

    def calculateStatistic(self, hist, target):
        """
        Calculate Statistic object where all signal candidates are considered signal
        """
        nTrueSig = self.mc_count['sum']
        if not hist.valid:
            return Statistic(nTrueSig, 0, 0)
        signal_bins = (hist.centers[target] > 0.5)
        bckgrd_bins = ~signal_bins
        nSig = hist.values[target][signal_bins].sum()
        nBg = hist.values[target][bckgrd_bins].sum()
        return Statistic(nTrueSig, nSig, nBg)
718 """
719 Calculate Static object where only unique signal candidates are considered signal
720 """
721 nTrueSig = self.mc_count['sum']
722 if not tree:
723 return Statistic(nTrueSig, 0, 0)
724
725 nSig, nBg = 0, 0
726 for entry in tree:
727 if getattr(entry, "extraInfo__bouniqueSignal__bc") == 1:
728 nSig += 1
729 else:
730 nBg += 1
731 return Statistic(nTrueSig, nSig, nBg)
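
# Illustrative end-to-end sketch (not part of the original module): what a
# printReporting.py-style summary effectively does with the classes above.
def _example_report():
    particles, configuration = load_config()
    for particle in particles:
        mp = MonitoringParticle(particle)
        print(mp.particle.identifier,
              f"covered BR {sum(mp.exc_br_per_channel.values()):.4f}",
              f"channels {mp.reconstructed_number_of_channels}/{mp.total_number_of_channels}")
        print(mp.after_tag)  # efficiency/purity after unique tagging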