monitoring.py
#!/usr/bin/env python

# @cond SUPPRESS_DOXYGEN

"""
Contains classes to read in the monitoring output
and some simple plotting routines.

This is used by printReporting.py and latexReporting.py
to create summaries for a FEI training or application.
"""

try:
    from generators import get_default_decayfile
except ModuleNotFoundError:
    print("MonitoringBranchingFractions won't work.")
from basf2_mva_evaluation import plotting
import basf2_mva_util
import pickle
import copy
import math
import os
import numpy as np
import pdg
import fei

def removeJPsiSlash(string):
    """ Removes slashes from a string, since they are not allowed in filenames. """
    return string.replace('/', '')


def load_config():
    """ Load the FEI configuration from the Summary.pickle file. """
    if not os.path.isfile('Summary.pickle'):
        raise RuntimeError("""Could not find Summary.pickle!
                              This file is automatically created by the FEI training.
                              But you can also create it yourself using:
                              pickle.dump((particles, configuration), open('Summary.pickle', 'wb'))""")
    return pickle.load(open('Summary.pickle', 'rb'))

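# A minimal usage sketch of load_config() (assuming it is called from inside a
# FEI training directory containing Summary.pickle; the tuple layout follows
# the pickle.dump call quoted in the error message above):
#
#     particles, configuration = load_config()
#     for p in particles:
#         print(p.identifier)
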

class Statistic:
    """
    This class provides the efficiency, purity and other quantities for a
    given number of true signal candidates, signal candidates and background candidates.
    """

    def __init__(self, nTrueSig, nSig, nBg):
        """
        Create a new Statistic object
        @param nTrueSig the number of true signal particles
        @param nSig the number of reconstructed signal candidates
        @param nBg the number of reconstructed background candidates
        """
        self._nTrueSig = nTrueSig
        self._nSig = nSig
        self._nBg = nBg

    @property
    def nSig(self):
        """ Returns the number of reconstructed signal candidates. """
        return self._nSig

    @property
    def nTrueSig(self):
        """ Returns the number of true signal particles. """
        return self._nTrueSig

    @property
    def nBg(self):
        """ Returns the number of reconstructed background candidates. """
        return self._nBg

    @property
    def nTotal(self):
        """ Returns the total number of reconstructed candidates. """
        return self._nSig + self._nBg

    @property
    def purity(self):
        """ Returns the purity of the reconstructed candidates. """
        if self._nSig == 0:
            return 0.0
        if self.nTotal == 0:
            return 0.0
        return self._nSig / float(self.nTotal)

    @property
    def efficiency(self):
        """ Returns the efficiency of the reconstructed signal candidates with respect to the number of true signal particles. """
        if self._nSig == 0:
            return 0.0
        if self._nTrueSig == 0:
            return float('inf')
        return self._nSig / float(self._nTrueSig)

    @property
    def purityError(self):
        """ Returns the uncertainty of the purity. """
        if self.nTotal == 0:
            return 0.0
        return self.calcStandardDeviation(self._nSig, self.nTotal)

    @property
    def efficiencyError(self):
        """
        Returns the uncertainty of the efficiency.
        For an efficiency eps = self._nSig/self._nTrueSig, this function calculates the
        standard deviation according to http://arxiv.org/abs/physics/0701199 .
        """
        if self._nTrueSig == 0:
            return float('inf')
        return self.calcStandardDeviation(self._nSig, self._nTrueSig)

    def calcStandardDeviation(self, k, n):
        """ Helper method to calculate the standard deviation for efficiencies. """
        k = float(k)
        n = float(n)
        variance = (k + 1) * (k + 2) / ((n + 2) * (n + 3)) - (k + 1) ** 2 / ((n + 2) ** 2)
        if variance <= 0:
            return 0.0
        return math.sqrt(variance)
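
    # Note on calcStandardDeviation: the expression above is the variance of a
    # binomial efficiency k/n with a flat prior (see the reference in
    # efficiencyError, arXiv:physics/0701199):
    #   Var(eps) = (k+1)(k+2) / ((n+2)(n+3)) - ((k+1)/(n+2))^2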

    def __str__(self):
        """ Returns a string representation of a Statistic object. """
        o = f"nTrueSig {self.nTrueSig} nSig {self.nSig} nBg {self.nBg}\n"
        o += f"Efficiency {self.efficiency:.3f} ({self.efficiencyError:.3f})\n"
        o += f"Purity {self.purity:.3f} ({self.purityError:.3f})\n"
        return o

    def __add__(self, a):
        """ Adds two Statistic objects and returns a new object. """
        return Statistic(self.nTrueSig, self.nSig + a.nSig, self.nBg + a.nBg)

    def __radd__(self, a):
        """
        Returns a new Statistic object if the current one is added to zero.
        Necessary to apply the built-in sum() to Statistic objects.
        """
        if a != 0:
            return NotImplemented
        return Statistic(self.nTrueSig, self.nSig, self.nBg)

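# Because __radd__ accepts a plain 0, per-channel objects can be combined with
# the built-in sum(). A sketch with made-up numbers:
#
#     channel_stats = [Statistic(1000, 50, 200), Statistic(1000, 30, 100)]
#     total = sum(channel_stats)
#     print(total)  # efficiency and purity of the combined candidates
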
class MonitoringHist:
    """
    Reads all TH1F and TH2F from a ROOT file
    and puts them into a more accessible format.
    """

    def __init__(self, filename, dirname):
        """
        Reads histograms from the given file
        @param filename the name of the ROOT file
        @param dirname the name of the directory inside the ROOT file
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        self.values = {}
        self.centers = {}
        self.two_dimensional = {}
        self.nbins = {}
        self.valid = os.path.isfile(filename)

        if not self.valid:
            return

        f = ROOT.TFile.Open(filename, 'read')
        d = f.Get(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(dirname))

        for key in d.GetListOfKeys():
            name = ROOT.Belle2.MakeROOTCompatible.invertMakeROOTCompatible(key.GetName())
            hist = key.ReadObj()
            if not isinstance(hist, (ROOT.TH1D, ROOT.TH1F, ROOT.TH2D, ROOT.TH2F)):
                continue
            self.two_dimensional[name] = isinstance(hist, (ROOT.TH2D, ROOT.TH2F))
            if self.two_dimensional[name]:
                nbins = (hist.GetNbinsX(), hist.GetNbinsY())
                self.centers[name] = [[hist.GetXaxis().GetBinCenter(i) for i in range(nbins[0] + 2)],
                                      [hist.GetYaxis().GetBinCenter(i) for i in range(nbins[1] + 2)]]
                self.values[name] = [[hist.GetBinContent(i, j) for i in range(nbins[0] + 2)] for j in range(nbins[1] + 2)]
                self.nbins[name] = nbins
            else:
                nbins = hist.GetNbinsX()
                self.centers[name] = np.array([hist.GetBinCenter(i) for i in range(nbins + 2)])
                self.values[name] = np.array([hist.GetBinContent(i) for i in range(nbins + 2)])
                self.nbins[name] = nbins

    def sum(self, name):
        """
        Calculates the sum of a given histogram (== sum of all entries)
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            tempsum = 0
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    tempsum += self.values[name][i][j]
            return tempsum
        return np.sum(self.values[name])

    def mean(self, name):
        """
        Calculates the mean of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            tempsum = 0
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    tempsum += self.centers[name][i][j] * self.values[name][i][j]
            return tempsum / self.sum(name)
        return np.average(self.centers[name], weights=self.values[name])

    def std(self, name):
        """
        Calculates the standard deviation of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            avg = self.mean(name)
            tempsum = 0
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    tempsum += self.values[name][i][j] * (self.centers[name][i][j] - avg)**2
            return np.sqrt(tempsum / self.sum(name))
        avg = np.average(self.centers[name], weights=self.values[name])
        return np.sqrt(np.average((self.centers[name] - avg)**2, weights=self.values[name]))

    def min(self, name):
        """
        Calculates the minimum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            tempmin = np.inf
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    if self.values[name][i][j] < tempmin:
                        tempmin = self.centers[name][i][j]
            return tempmin
        nonzero = np.nonzero(self.values[name])[0]
        if len(nonzero) == 0:
            return np.nan
        return self.centers[name][nonzero[0]]

    def max(self, name):
        """
        Calculates the maximum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        if self.two_dimensional[name]:
            tempmax = -np.inf
            for i in range(len(self.values[name])):
                for j in range(len(self.values[name][i])):
                    if self.values[name][i][j] > tempmax:
                        tempmax = self.centers[name][i][j]
            return tempmax
        nonzero = np.nonzero(self.values[name])[0]
        if len(nonzero) == 0:
            return np.nan
        return self.centers[name][nonzero[-1]]

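# A usage sketch for MonitoringHist (the file and directory names follow the
# Monitor_* conventions used by MonitoringParticle below; the histogram key
# 'isSignal' is only an illustrative target variable):
#
#     hist = MonitoringHist('Monitor_TrainingData.root', channel.label)
#     if hist.valid:
#         print(hist.sum('isSignal'), hist.mean('isSignal'), hist.std('isSignal'))
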
class MonitoringNTuple:
    """
    Reads the ntuple named variables from a ROOT file
    """

    def __init__(self, filename, treenameprefix):
        """
        Reads the ntuple from the given file
        @param filename the name of the ROOT file
        @param treenameprefix the prefix of the tree name
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        self.valid = os.path.isfile(filename)
        print(f'FEI-monitoring: Looking for {filename}')
        if not self.valid:
            raise RuntimeError(f"Could not find {filename}: current dir is {os.getcwd()}")

        self.f = ROOT.TFile.Open(filename, 'read')
        print(f'FEI-monitoring: Found {filename}')
        self.tree = self.f.Get(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(f'{treenameprefix} variables'))
        print(f'FEI-monitoring: Found {treenameprefix} variables')
        self.filename = filename

class MonitoringModuleStatistics:
    """
    Reads the module statistics for a single particle from the output ROOT file
    and puts them into a more accessible format.
    """

    def __init__(self, particle):
        """
        Reads the module statistics from the file named Monitor_ModuleStatistics.root
        @param particle the particle for which the statistics are read
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa
        root_file = ROOT.TFile.Open('Monitor_ModuleStatistics.root', 'read')
        persistentTree = root_file.Get('persistent')
        persistentTree.GetEntry(0)
        # Clone() needed so we actually own the object (original dies when tfile is deleted)
        stats = persistentTree.ProcessStatistics.Clone()

        # merge statistics from all persistent trees into 'stats'
        numEntries = persistentTree.GetEntriesFast()
        for i in range(1, numEntries):
            persistentTree.GetEntry(i)
            stats.merge(persistentTree.ProcessStatistics)

        # TODO: .getTimeSum always returns 0 at the moment?!
        statistic = {m.getName(): m.getTimeSum(m.c_Event) / 1e9 for m in stats.getAll()}

        self.channel_time = {}
        self.channel_time_per_module = {}
        for channel in particle.channels:
            if channel.label not in self.channel_time:
                self.channel_time[channel.label] = 0.0
                self.channel_time_per_module[channel.label] = {'ParticleCombiner': 0.0,
                                                               'BestCandidateSelection': 0.0,
                                                               'PListCutAndCopy': 0.0,
                                                               'VariablesToExtraInfo': 0.0,
                                                               'MCMatch': 0.0,
                                                               'ParticleSelector': 0.0,
                                                               'MVAExpert': 0.0,
                                                               'ParticleVertexFitter': 0.0,
                                                               'TagUniqueSignal': 0.0,
                                                               'VariablesToHistogram': 0.0,
                                                               'VariablesToNtuple': 0.0}
            for key, time in statistic.items():
                if (channel.decayString in key or channel.name in key):
                    self.channel_time[channel.label] += time
                    for k in self.channel_time_per_module[channel.label]:
                        if k in key:
                            self.channel_time_per_module[channel.label][k] += time

        self.particle_time = 0
        for key, time in statistic.items():
            if particle.identifier in key:
                self.particle_time += time


def MonitorSigProbPlot(particle, filename):
    """ Creates a Signal probability plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))

    p = plotting.VerboseDistribution(range_in_std=5.0)
    common = (df['probability'] >= 0) & (df['probability'] <= 1)
    df = df[common]
    p.add(df, 'probability', (df['signal'] == 1), label="Signal")
    p.add(df, 'probability', (df['signal'] == 0), label="Background")
    p.finish()
    p.axis.set_title("Signal probability")
    p.axis.set_xlabel("Probability")
    p.save(filename + '.png')


def MonitorSpectatorPlot(particle, spectator, filename, range=(None, None)):
    """ Creates a spectator plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc', spectator,
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', spectator, 'probability', 'signal'], max_entries=int(1e8))
    for i, cut in enumerate([0.0, 0.01, 0.05, 0.1, 0.2, 0.5]):
        p = plotting.VerboseDistribution(range_in_std=5.0)
        common = (df['probability'] >= cut)
        if range[0] is not None:
            common &= (df[spectator] >= range[0])
        if range[1] is not None:
            common &= (df[spectator] <= range[1])
        df = df[common]
        p.add(df, spectator, (df['signal'] == 1), label="Signal")
        p.add(df, spectator, (df['signal'] == 0), label="Background")
        p.finish()
        p.axis.set_title(f"{spectator} for signal probability >= {cut:.2f}")
        p.axis.set_xlabel(spectator)
        p.save(f'{filename}_{i}.png')


def MonitorROCPlot(particle, filename):
    """ Creates a ROC plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))
    p = plotting.RejectionOverEfficiency()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0, label='All')
    p.finish()
    p.save(filename + '.png')


def MonitorDiagPlot(particle, filename):
    """ Creates a Diagonal plot using ROOT. """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.chain2dict(particle.final_ntuple.tree,
                                   ['extraInfo__bouniqueSignal__bc',
                                    'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                   ['unique', 'probability', 'signal'], max_entries=int(1e8))
    p = plotting.Diagonal()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0)
    p.finish()
    p.save(filename + '.png')


def MonitoringMCCount(particle):
    """
    Reads the MC counts for a given particle from the ROOT file mcParticlesCount.root
    @param particle the particle for which the MC counts are read
    @return dictionary with 'sum', 'std', 'avg', 'max', and 'min'
    """
    # Always avoid the top-level 'import ROOT'.
    import ROOT  # noqa
    root_file = ROOT.TFile.Open('mcParticlesCount.root', 'read')

    key = f'NumberOfMCParticlesInEvent({abs(pdg.from_name(particle.name))})'
    key = ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(key)
    hist = root_file.Get(key)

    mc_counts = {'sum': 0, 'std': 0, 'avg': 0, 'min': 0, 'max': 0}
    if hist:
        mc_counts['sum'] = sum(hist.GetXaxis().GetBinCenter(bin + 1) * hist.GetBinContent(bin + 1)
                               for bin in range(hist.GetNbinsX()))
        mc_counts['std'] = hist.GetStdDev()
        mc_counts['avg'] = hist.GetMean()
        mc_counts['max'] = hist.GetXaxis().GetBinCenter(hist.FindLastBinAbove(0.0))
        mc_counts['min'] = hist.GetXaxis().GetBinCenter(hist.FindFirstBinAbove(0.0))
    return mc_counts

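# The 'sum' entry is the total number of true particles in the sample: each bin
# center is an MC multiplicity and each bin content the number of events with
# that multiplicity. A usage sketch (with `particle` taken from load_config()):
#
#     mc_counts = MonitoringMCCount(particle)
#     print(f"True {particle.name} in sample: {mc_counts['sum']}")
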

class MonitoringBranchingFractions:
    """ Class extracts the branching fractions of a decay channel from the DECAY.DEC file. """

    _shared = None

    def __init__(self):
        """
        Create a new MonitoringBranchingFraction object.
        The extracted branching fractions are cached, hence creating more than one object does not do anything.
        """
        if MonitoringBranchingFractions._shared is None:
            decay_file = get_default_decayfile()
            self.exclusive_branching_fractions = self.loadExclusiveBranchingFractions(decay_file)
            self.inclusive_branching_fractions = self.loadInclusiveBranchingFractions(self.exclusive_branching_fractions)
            MonitoringBranchingFractions._shared = (self.exclusive_branching_fractions, self.inclusive_branching_fractions)
        else:
            self.exclusive_branching_fractions, self.inclusive_branching_fractions = MonitoringBranchingFractions._shared

    def getExclusive(self, particle):
        """ Returns the exclusive (i.e. without the branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.exclusive_branching_fractions)

    def getInclusive(self, particle):
        """ Returns the inclusive (i.e. including all branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.inclusive_branching_fractions)

    def getBranchingFraction(self, particle, branching_fractions):
        """ Returns the branching fraction of a particle given a branching_fractions table. """
        result = {c.label: 0.0 for c in particle.channels}
        name = particle.name
        channels = [tuple(sorted(d.split(':')[0] for d in channel.daughters)) for channel in particle.channels]
        if name not in branching_fractions:
            name = pdg.conjugate(name)
            channels = [tuple(pdg.conjugate(d) for d in channel) for channel in channels]
            if name not in branching_fractions:
                return result
        for c, key in zip(particle.channels, channels):
            if key in branching_fractions[name]:
                result[c.label] = branching_fractions[name][key]
        return result

    def loadExclusiveBranchingFractions(self, filename):
        """
        Load the branching fractions from the MC decay file.
        """

        def isFloat(element):
            """ Checks if element is convertible to float. """
            try:
                float(element)
                return True
            except ValueError:
                return False

        def isValidParticle(element):
            """ Checks if element is a valid PDG name for a particle. """
            try:
                pdg.from_name(element)
                return True
            except LookupError:
                return False

        branching_fractions = {'UNKNOWN': {}}

        mother = 'UNKNOWN'
        with open(filename) as f:
            for line in f:
                fields = line.split(' ')
                fields = [x for x in fields if x != '']
                if len(fields) < 2 or fields[0][0] == '#':
                    continue
                if fields[0] == 'Decay':
                    mother = fields[1].strip()
                    if not isValidParticle(mother):
                        mother = 'UNKNOWN'
                    continue
                if fields[0] == 'Enddecay':
                    mother = 'UNKNOWN'
                    continue
                if mother == 'UNKNOWN':
                    continue
                fields = fields[:-1]
                if len(fields) < 1 or not isFloat(fields[0]):
                    continue
                while len(fields) > 1:
                    if isValidParticle(fields[-1]):
                        break
                    fields = fields[:-1]
                if len(fields) < 1 or not all(isValidParticle(p) for p in fields[1:]):
                    continue
                neutrinoTag_list = ['nu_e', 'nu_mu', 'nu_tau', 'anti-nu_e', 'anti-nu_mu', 'anti-nu_tau']
                daughters = tuple(sorted(p for p in fields[1:] if p not in neutrinoTag_list))
                if mother not in branching_fractions:
                    branching_fractions[mother] = {}
                if daughters not in branching_fractions[mother]:
                    branching_fractions[mother][daughters] = 0.0
                branching_fractions[mother][daughters] += float(fields[0])

        del branching_fractions['UNKNOWN']
        return branching_fractions

    def loadInclusiveBranchingFractions(self, exclusive_branching_fractions):
        """
        Get the covered branching fraction of each particle using a recursive algorithm:
        every channel is scaled by the covered branching fraction of each daughter
        appearing in it.
        @param exclusive_branching_fractions the exclusive branching fractions, as returned by loadExclusiveBranchingFractions
        """
        particles = set(exclusive_branching_fractions.keys())
        particles.update({pdg.conjugate(p) for p in particles if p != pdg.conjugate(p)})
        particles = sorted(particles, key=lambda x: pdg.get(x).Mass())
        inclusive_branching_fractions = copy.deepcopy(exclusive_branching_fractions)

        for p in particles:
            if p in inclusive_branching_fractions:
                br = sum(inclusive_branching_fractions[p].values())
            else:
                br = sum(inclusive_branching_fractions[pdg.conjugate(p)].values())
            for p_br in inclusive_branching_fractions.values():
                for c in p_br:
                    for _ in range(c.count(p)):
                        p_br[c] *= br
        return inclusive_branching_fractions

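    # Worked example with made-up numbers: if the exclusive table has
    # BR(B+ -> anti-D0 pi+) = 0.005 and the anti-D0 entries sum to a covered
    # branching fraction of 0.3 at the time B+ is processed, the loop above
    # multiplies the B+ channel once per anti-D0 occurrence, giving an
    # inclusive (covered) branching fraction of 0.005 * 0.3 = 0.0015.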


class MonitoringParticle:
    """
    Monitoring object containing all the monitoring information
    about a single particle.
    """

    def __init__(self, particle):
        """
        Read the monitoring information of the given particle
        @param particle the particle for which the information is read
        """
        self.particle = particle
        particlesInStages = fei.core.get_stages_from_particles([particle])
        stage = 0
        for i in range(len(particlesInStages)):
            for iparticle in particlesInStages[i]:
                if iparticle.identifier == self.particle.identifier:
                    stage = i + 1
                    break
        if stage == 0:
            raise RuntimeError(f"Could not find particle {self.particle.identifier} in the list of stages.")

        self.mc_count = MonitoringMCCount(particle)
        self.module_statistic = MonitoringModuleStatistics(particle)
        self.time_per_channel = self.module_statistic.channel_time
        self.time_per_channel_per_module = self.module_statistic.channel_time_per_module
        self.total_time = self.module_statistic.particle_time + sum(self.time_per_channel.values())

        self.total_number_of_channels = len(self.particle.channels)
        self.reconstructed_number_of_channels = 0

        self.branching_fractions = MonitoringBranchingFractions()
        self.exc_br_per_channel = self.branching_fractions.getExclusive(particle)
        self.inc_br_per_channel = self.branching_fractions.getInclusive(particle)

        self.before_ranking = {}
        self.after_ranking = {}
        self.after_vertex = {}
        self.after_classifier = {}
        self.training_data = {}
        self.ignored_channels = {}

        for channel in self.particle.channels:
            hist = MonitoringHist('Monitor_PreReconstruction_BeforeRanking.root', f'{channel.label}')
            self.before_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterRanking.root', f'{channel.label}')
            self.after_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterVertex.root', f'{channel.label}')
            self.after_vertex[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PostReconstruction_AfterMVA.root', f'{channel.label}')
            self.after_classifier[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            if hist.valid and hist.sum(channel.mvaConfig.target) > 0:
                self.reconstructed_number_of_channels += 1
                self.ignored_channels[channel.label] = False
            else:
                self.ignored_channels[channel.label] = True
            hist = MonitoringHist('Monitor_TrainingData.root', f'{channel.label}')
            self.training_data[channel.label] = hist

        plist = removeJPsiSlash(particle.identifier)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforePostCut.root', f'{plist}')
        self.before_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforeRanking.root', f'{plist}')
        self.before_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_AfterRanking.root', f'{plist}')
        self.after_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        self.before_tag = self.calculateStatistic(hist, self.particle.mvaConfig.target)

        self.final_ntuple = MonitoringNTuple('Monitor_Final.root', f'{plist}')
        self.after_tag = self.calculateUniqueStatistic(self.final_ntuple.tree)

    def calculateStatistic(self, hist, target):
        """
        Calculate a Statistic object where all signal candidates are considered signal.
        """
        nTrueSig = self.mc_count['sum']
        if not hist.valid:
            return Statistic(nTrueSig, 0, 0)
        signal_bins = (hist.centers[target] > 0.5)
        bckgrd_bins = ~signal_bins
        nSig = hist.values[target][signal_bins].sum()
        nBg = hist.values[target][bckgrd_bins].sum()
        return Statistic(nTrueSig, nSig, nBg)
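
    # The target variable (e.g. isSignal) only populates the bins around 0 and
    # 1, so cutting the bin centers at 0.5 separates background from signal.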

    def calculateUniqueStatistic(self, tree):
        """
        Calculate a Statistic object where only unique signal candidates are considered signal.
        """
        nTrueSig = self.mc_count['sum']
        if not tree:
            return Statistic(nTrueSig, 0, 0)

        nSig, nBg = 0, 0
        for entry in tree:
            if getattr(entry, "extraInfo__bouniqueSignal__bc") == 1:
                nSig += 1
            else:
                nBg += 1
        return Statistic(nTrueSig, nSig, nBg)

# @endcond