Belle II Software light-2406-ragdoll
monitoring.py
#!/usr/bin/env python


# @cond SUPPRESS_DOXYGEN

13"""
14 Contains classes to read in the monitoring output
15 and some simple plotting routines.
16
17 This is used by printReporting.py and latexReporting.py
18 to create summaries for a FEI training or application.
19"""
20
21try:
22 from generators import get_default_decayfile
23except ModuleNotFoundError:
24 print("MonitoringBranchingFractions won't work.")
25from basf2_mva_evaluation import plotting
26import basf2_mva_util
27import pickle
28import copy
29import math
30import os
31import numpy as np
32import pdg
33
34
def removeJPsiSlash(string):
    """ Strip all '/' characters from a string, since slashes are not allowed in filenames. """
    return ''.join(string.split('/'))
38
39
def load_config():
    """
    Load the FEI configuration from the Summary.pickle file.
    @return the (particles, configuration) object stored by the FEI training
    @raise RuntimeError if Summary.pickle does not exist in the current directory
    """
    if not os.path.isfile('Summary.pickle'):
        raise RuntimeError("""Could not find Summary.pickle!
                              This file is automatically created by the FEI training.
                              But you can also create it yourself using:
                              pickle.dump((particles, configuration), open('Summary.pickle', 'wb'))""")
    # Bug fix: use a context manager so the file handle is closed deterministically
    # instead of leaking it until garbage collection.
    with open('Summary.pickle', 'rb') as summary_file:
        return pickle.load(summary_file)
48
49
class Statistic:
    """
    Provides efficiency, purity and their uncertainties for a given number of
    true signal particles, reconstructed signal candidates and reconstructed
    background candidates.
    """

    def __init__(self, nTrueSig, nSig, nBg):
        """
        Create a new Statistic object
        @param nTrueSig the number of true signal particles
        @param nSig the number of reconstructed signal candidates
        @param nBg the number of reconstructed background candidates
        """
        #: number of true signal particles
        self.nTrueSig = nTrueSig
        #: number of reconstructed signal candidates
        self.nSig = nSig
        #: number of reconstructed background candidates
        self.nBg = nBg

    @property
    def nTotal(self):
        """ Returns total number of reconstructed candidates. """
        return self.nBg + self.nSig

    @property
    def purity(self):
        """ Returns the purity of the reconstructed candidates. """
        if self.nSig == 0 or self.nTotal == 0:
            return 0.0
        return self.nSig / float(self.nTotal)

    @property
    def efficiency(self):
        """ Returns the efficiency of the reconstructed signal candidates with respect to the number of true signal particles. """
        if self.nSig == 0:
            return 0.0
        if self.nTrueSig == 0:
            # no true signal at all: the ratio is ill-defined
            return float('inf')
        return self.nSig / float(self.nTrueSig)

    @property
    def purityError(self):
        """ Returns the uncertainty of the purity. """
        return self.calcStandardDeviation(self.nSig, self.nTotal) if self.nTotal != 0 else 0.0

    @property
    def efficiencyError(self):
        """
        Returns the uncertainty of the efficiency.
        For an efficiency eps = self.nSig/self.nTrueSig, this function calculates the
        standard deviation according to http://arxiv.org/abs/physics/0701199 .
        """
        if self.nTrueSig == 0:
            return float('inf')
        return self.calcStandardDeviation(self.nSig, self.nTrueSig)

    def calcStandardDeviation(self, k, n):
        """ Helper method to calculate the standard deviation for efficiencies. """
        k, n = float(k), float(n)
        # variance of the Bayesian posterior of a binomial fraction k/n
        variance = (k + 1) * (k + 2) / ((n + 2) * (n + 3)) - (k + 1) ** 2 / ((n + 2) ** 2)
        return math.sqrt(variance) if variance > 0 else 0.0

    def __str__(self):
        """ Returns a string representation of a Statistic object. """
        parts = [f"nTrueSig {self.nTrueSig} nSig {self.nSig} nBg {self.nBg}",
                 f"Efficiency {self.efficiency:.3f} ({self.efficiencyError:.3f})",
                 f"Purity {self.purity:.3f} ({self.purityError:.3f})"]
        return "\n".join(parts) + "\n"

    def __add__(self, a):
        """ Adds two Statistics objects and returns a new object. """
        return Statistic(self.nTrueSig, self.nSig + a.nSig, self.nBg + a.nBg)

    def __radd__(self, a):
        """
        Returns a new Statistic object if the current one is added to zero.
        Necessary to apply sum-function to Statistic objects.
        """
        return Statistic(self.nTrueSig, self.nSig, self.nBg) if a == 0 else NotImplemented
138
139
class MonitoringHist:
    """
    Reads all TH1F and TH2F from a ROOT file
    and puts them into a more accessible format.
    """

    def __init__(self, filename, dirname):
        """
        Reads histograms from the given file
        @param filename the name of the ROOT file
        @param dirname the directory inside the ROOT file holding the histograms
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        #: bin contents per histogram name
        self.values = {}
        #: bin centers per histogram name
        self.centers = {}
        #: number of bins per histogram name
        self.nbins = {}
        #: whether the ROOT file exists
        self.valid = os.path.isfile(filename)

        if not self.valid:
            return

        rootfile = ROOT.TFile.Open(filename, 'read')
        directory = rootfile.Get(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(dirname))

        for key in directory.GetListOfKeys():
            name = ROOT.Belle2.MakeROOTCompatible.invertMakeROOTCompatible(key.GetName())
            obj = key.ReadObj()
            if not isinstance(obj, (ROOT.TH1D, ROOT.TH1F, ROOT.TH2D, ROOT.TH2F)):
                continue
            if isinstance(obj, (ROOT.TH2D, ROOT.TH2F)):
                nx, ny = obj.GetNbinsX(), obj.GetNbinsY()
                xaxis, yaxis = obj.GetXaxis(), obj.GetYaxis()
                # ranges of nx + 2 / ny + 2 include the under- and overflow bins
                self.centers[name] = np.array([[xaxis.GetBinCenter(i) for i in range(nx + 2)],
                                               [yaxis.GetBinCenter(i) for i in range(ny + 2)]])
                self.values[name] = np.array([[obj.GetBinContent(i, j) for i in range(nx + 2)] for j in range(ny + 2)])
                self.nbins[name] = (nx, ny)
            else:
                nx = obj.GetNbinsX()
                self.centers[name] = np.array([obj.GetBinCenter(i) for i in range(nx + 2)])
                self.values[name] = np.array([obj.GetBinContent(i) for i in range(nx + 2)])
                self.nbins[name] = nx

    def sum(self, name):
        """
        Calculates the sum of a given histogram (== sum of all entries)
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        return np.sum(self.values[name])

    def mean(self, name):
        """
        Calculates the mean of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        return np.average(self.centers[name], weights=self.values[name])

    def std(self, name):
        """
        Calculates the standard deviation of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        center = np.average(self.centers[name], weights=self.values[name])
        spread = np.average((self.centers[name] - center) ** 2, weights=self.values[name])
        return np.sqrt(spread)

    def min(self, name):
        """
        Calculates the minimum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        filled = np.nonzero(self.values[name])[0]
        return self.centers[name][filled[0]] if len(filled) else np.nan

    def max(self, name):
        """
        Calculates the maximum of a given histogram
        @param name key of the histogram
        """
        if name not in self.centers:
            return np.nan
        filled = np.nonzero(self.values[name])[0]
        return self.centers[name][filled[-1]] if len(filled) else np.nan
238
239
class MonitoringNTuple:
    """
    Reads the ntuple named variables from a ROOT file
    """

    def __init__(self, filename, treenameprefix):
        """
        Reads ntuple from the given file
        @param filename the name of the ROOT file
        @param treenameprefix prefix of the tree name holding the variables
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa

        #: whether the ROOT file exists
        self.valid = os.path.isfile(filename)
        if not self.valid:
            return
        #: the opened ROOT file; kept as an attribute so the tree stays alive
        self.f = ROOT.TFile.Open(filename, 'read')
        #: the tree named '<treenameprefix> variables' inside the file
        self.tree = self.f.Get(f'{treenameprefix} variables')
        #: name of the ROOT file
        self.filename = filename
262
263
class MonitoringModuleStatistics:
    """
    Reads the module statistics for a single particle from the outputted root file
    and puts them into a more accessible format
    """

    def __init__(self, particle):
        """
        Reads the module statistics from the file named Monitor_ModuleStatistics.root
        @param particle the particle for which the statistics are read
        """
        # Always avoid the top-level 'import ROOT'.
        import ROOT  # noqa
        root_file = ROOT.TFile.Open('Monitor_ModuleStatistics.root', 'read')
        persistentTree = root_file.Get('persistent')
        persistentTree.GetEntry(0)
        # Clone() needed so we actually own the object (original dies when tfile is deleted)
        stats = persistentTree.ProcessStatistics.Clone()

        # merge statistics from all persistent trees into 'stats'
        numEntries = persistentTree.GetEntriesFast()
        for i in range(1, numEntries):
            persistentTree.GetEntry(i)
            stats.merge(persistentTree.ProcessStatistics)

        # map module name -> time spent in the event loop, converted ns -> s
        # TODO .getTimeSum returns always 0 at the moment ?!
        statistic = {m.getName(): m.getTimeSum(m.c_Event) / 1e9 for m in stats.getAll()}

        #: the time spent per channel label
        self.channel_time = {}
        #: the time spent per channel label, split by module type
        self.channel_time_per_module = {}
        for channel in particle.channels:
            if channel.label not in self.channel_time:
                self.channel_time[channel.label] = 0.0
                self.channel_time_per_module[channel.label] = {'ParticleCombiner': 0.0,
                                                               'BestCandidateSelection': 0.0,
                                                               'PListCutAndCopy': 0.0,
                                                               'VariablesToExtraInfo': 0.0,
                                                               'MCMatch': 0.0,
                                                               'ParticleSelector': 0.0,
                                                               'MVAExpert': 0.0,
                                                               'ParticleVertexFitter': 0.0,
                                                               'TagUniqueSignal': 0.0,
                                                               'VariablesToHistogram': 0.0,
                                                               'VariablesToNtuple': 0.0}
            # a module is attributed to this channel if its name contains the
            # channel's decay string or channel name
            for key, time in statistic.items():
                if(channel.decayString in key or channel.name in key):
                    self.channel_time[channel.label] += time
                    for k in self.channel_time_per_module[channel.label]:
                        if k in key:
                            self.channel_time_per_module[channel.label][k] += time

        #: time spent in modules whose name contains the particle identifier
        #: (particle-level modules, not attributed to a single channel)
        self.particle_time = 0
        for key, time in statistic.items():
            if particle.identifier in key:
                self.particle_time += time
322
323
def MonitorCosBDLPlot(particle, filename):
    """
    Creates a CosBDL plot using ROOT.
    @param particle MonitoringParticle providing the final ntuple
    @param filename output filename prefix; one png per signal-probability cut is written
    """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.tree2dict(particle.final_ntuple.tree,
                                  ['extraInfo__bouniqueSignal__bc', 'cosThetaBetweenParticleAndNominalB',
                                   'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                  ['unique', 'cosThetaBDl', 'probability', 'signal'])
    for i, cut in enumerate([0.0, 0.01, 0.05, 0.1, 0.2, 0.5]):
        p = plotting.VerboseDistribution(range_in_std=5.0)
        common = (np.abs(df['cosThetaBDl']) < 10) & (df['probability'] >= cut)
        # df is overwritten, so the cuts accumulate across iterations; this is
        # equivalent to cutting from scratch because the cut values are increasing
        df = df[common]
        p.add(df, 'cosThetaBDl', (df['signal'] == 1), label="Signal")
        p.add(df, 'cosThetaBDl', (df['signal'] == 0), label="Background")
        p.finish()
        p.axis.set_title(f"Cosine of Theta between B and Dl system for signal probability >= {cut:.2f}")
        p.axis.set_xlabel("CosThetaBDl")
        # Bug fix: build the output name from the 'filename' argument instead of a
        # hard-coded literal, so each particle gets its own plot files.
        p.save(f'{filename}_{i}.png')
342
343
def MonitorMbcPlot(particle, filename):
    """
    Creates a Mbc plot using ROOT.
    @param particle MonitoringParticle providing the final ntuple
    @param filename output filename prefix; one png per signal-probability cut is written
    """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.tree2dict(particle.final_ntuple.tree,
                                  ['extraInfo__bouniqueSignal__bc', 'Mbc',
                                   'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                  ['unique', 'Mbc', 'probability', 'signal'])
    for i, cut in enumerate([0.0, 0.01, 0.05, 0.1, 0.2, 0.5]):
        p = plotting.VerboseDistribution(range_in_std=5.0)
        common = (df['Mbc'] > 5.23) & (df['probability'] >= cut)
        # df is overwritten, so the cuts accumulate across iterations; this is
        # equivalent to cutting from scratch because the cut values are increasing
        df = df[common]
        p.add(df, 'Mbc', (df['signal'] == 1), label="Signal")
        p.add(df, 'Mbc', (df['signal'] == 0), label="Background")
        p.finish()
        p.axis.set_title(f"Beam constrained mass for signal probability >= {cut:.2f}")
        p.axis.set_xlabel("Mbc")
        # Bug fix: build the output name from the 'filename' argument instead of a
        # hard-coded literal, so each particle gets its own plot files.
        p.save(f'{filename}_{i}.png')
362
363
def MonitorROCPlot(particle, filename):
    """
    Creates a ROC (rejection over efficiency) plot using ROOT.
    @param particle MonitoringParticle providing the final ntuple
    @param filename output filename without extension ('.png' is appended)
    """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.tree2dict(particle.final_ntuple.tree,
                                  ['extraInfo__bouniqueSignal__bc',
                                   'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                  ['unique', 'probability', 'signal'])
    # Bug fix: 'p' was used without ever being created; instantiate the
    # rejection-over-efficiency plot before adding data to it.
    p = plotting.RejectionOverEfficiency()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0, label='All')
    p.finish()
    p.save(filename + '.png')
376
377
def MonitorDiagPlot(particle, filename):
    """
    Creates a diagonal (classifier calibration) plot using ROOT.
    @param particle MonitoringParticle providing the final ntuple
    @param filename output filename without extension ('.png' is appended)
    """
    if not particle.final_ntuple.valid:
        return
    df = basf2_mva_util.tree2dict(particle.final_ntuple.tree,
                                  ['extraInfo__bouniqueSignal__bc',
                                   'extraInfo__boSignalProbability__bc', particle.particle.mvaConfig.target],
                                  ['unique', 'probability', 'signal'])
    # Bug fix: 'p' was used without ever being created; instantiate the
    # diagonal plot before adding data to it.
    p = plotting.Diagonal()
    p.add(df, 'probability', df['signal'] == 1, df['signal'] == 0)
    p.finish()
    p.save(filename + '.png')
390
391
def MonitoringMCCount(particle):
    """
    Reads the MC Counts for a given particle from the ROOT file mcParticlesCount.root
    @param particle the particle for which the MC counts are read
    @return dictionary with 'sum', 'std', 'avg', 'max', and 'min'
    """
    # Always avoid the top-level 'import ROOT'.
    import ROOT  # noqa
    root_file = ROOT.TFile.Open('mcParticlesCount.root', 'read')

    # the histogram key encodes the absolute PDG code of the particle
    hist_key = ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible(
        f'NumberOfMCParticlesInEvent({abs(pdg.from_name(particle.name))})')
    hist = root_file.Get(hist_key)

    mc_counts = {'sum': 0, 'std': 0, 'avg': 0, 'min': 0, 'max': 0}
    if hist:
        xaxis = hist.GetXaxis()
        # weighted sum over the regular bins: multiplicity * number of events
        mc_counts['sum'] = sum(xaxis.GetBinCenter(b + 1) * hist.GetBinContent(b + 1)
                               for b in range(hist.GetNbinsX()))
        mc_counts['std'] = hist.GetStdDev()
        mc_counts['avg'] = hist.GetMean()
        mc_counts['max'] = xaxis.GetBinCenter(hist.FindLastBinAbove(0.0))
        mc_counts['min'] = xaxis.GetBinCenter(hist.FindFirstBinAbove(0.0))
    return mc_counts
416
417
class MonitoringBranchingFractions:
    """ Class extracts the branching fractions of a decay channel from the DECAY.DEC file. """

    #: cached (exclusive, inclusive) branching-fraction tables shared between all instances
    _shared = None

    def __init__(self):
        """
        Create a new MonitoringBranchingFraction object.
        The extracted branching fractions are cached, hence creating more than one object does not do anything.
        """
        if MonitoringBranchingFractions._shared is None:
            decay_file = get_default_decayfile()
            #: exclusive branching fractions: mother name -> {sorted daughter tuple -> BF}
            self.exclusive_branching_fractions = self.loadExclusiveBranchingFractions(decay_file)
            #: inclusive branching fractions (daughter BFs folded in recursively)
            self.inclusive_branching_fractions = self.loadInclusiveBranchingFractions(self.exclusive_branching_fractions)
            MonitoringBranchingFractions._shared = (self.exclusive_branching_fractions, self.inclusive_branching_fractions)
        else:
            self.exclusive_branching_fractions, self.inclusive_branching_fractions = MonitoringBranchingFractions._shared

    def getExclusive(self, particle):
        """ Returns the exclusive (i.e. without the branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.exclusive_branching_fractions)

    def getInclusive(self, particle):
        """ Returns the inclusive (i.e. including all branching fractions of the daughters) branching fraction of a particle. """
        return self.getBranchingFraction(particle, self.inclusive_branching_fractions)

    def getBranchingFraction(self, particle, branching_fractions):
        """
        Returns the branching fraction of a particle given a branching_fraction table.
        @param particle the particle whose channels are looked up
        @param branching_fractions table as produced by loadExclusiveBranchingFractions
        @return dict channel label -> branching fraction (0.0 if not found)
        """
        result = {c.label: 0.0 for c in particle.channels}
        name = particle.name
        # channel key: sorted tuple of daughter names with the ':label' part stripped
        channels = [tuple(sorted(d.split(':')[0] for d in channel.daughters)) for channel in particle.channels]
        if name not in branching_fractions:
            # fall back to the charge conjugate if the particle itself is not listed
            name = pdg.conjugate(name)
            channels = [tuple(pdg.conjugate(d) for d in channel) for channel in channels]
            if name not in branching_fractions:
                return result
        for c, key in zip(particle.channels, channels):
            if key in branching_fractions[name]:
                result[c.label] = branching_fractions[name][key]
        return result

    def loadExclusiveBranchingFractions(self, filename):
        """
        Load branching fraction from MC decay-file.
        @param filename path to the DECAY.DEC style decay file
        @return dict mother name -> {sorted daughter tuple -> summed branching fraction}
        """

        def isFloat(element):
            """ Checks if element is a convertible to float"""
            try:
                float(element)
                return True
            except ValueError:
                return False

        def isValidParticle(element):
            """ Checks if element is a valid pdg name for a particle"""
            try:
                pdg.from_name(element)
                return True
            except LookupError:
                return False

        branching_fractions = {'UNKOWN': {}}

        # 'UNKOWN' (sic, kept for consistency) is the sentinel used outside Decay blocks
        mother = 'UNKOWN'
        with open(filename) as f:
            for line in f:
                fields = line.split(' ')
                fields = [x for x in fields if x != '']
                # skip short lines and comments
                if len(fields) < 2 or fields[0][0] == '#':
                    continue
                if fields[0] == 'Decay':
                    mother = fields[1].strip()
                    if not isValidParticle(mother):
                        mother = 'UNKOWN'
                    continue
                if fields[0] == 'Enddecay':
                    mother = 'UNKOWN'
                    continue
                if mother == 'UNKOWN':
                    continue
                # drop the last field -- presumably the decay model / terminator token; TODO confirm
                fields = fields[:-1]
                # a decay line must start with the branching fraction
                if len(fields) < 1 or not isFloat(fields[0]):
                    continue
                # strip trailing tokens until the last field is a valid particle name
                while len(fields) > 1:
                    if isValidParticle(fields[-1]):
                        break
                    fields = fields[:-1]
                if len(fields) < 1 or not all(isValidParticle(p) for p in fields[1:]):
                    continue
                # neutrinos are ignored when identifying a channel by its daughters
                neutrinoTag_list = ['nu_e', 'nu_mu', 'nu_tau', 'anti-nu_e', 'anti-nu_mu', 'anti-nu_tau']
                daughters = tuple(sorted(p for p in fields[1:] if p not in neutrinoTag_list))
                if mother not in branching_fractions:
                    branching_fractions[mother] = {}
                if daughters not in branching_fractions[mother]:
                    branching_fractions[mother][daughters] = 0.0
                # several lines can describe the same daughter set (different models); sum them
                branching_fractions[mother][daughters] += float(fields[0])

        del branching_fractions['UNKOWN']
        return branching_fractions

    def loadInclusiveBranchingFractions(self, exclusive_branching_fractions):
        """
        Get covered branching fraction of a particle using a recursive algorithm
        and the given exclusive branching_fractions (given as Hashable List)
        @param exclusive_branching_fractions table as produced by loadExclusiveBranchingFractions
        @return deep copy of the table with daughter branching fractions folded in
        """
        particles = set(exclusive_branching_fractions.keys())
        # include the charge conjugates of all listed particles
        particles.update({pdg.conjugate(p) for p in particles if p != pdg.conjugate(p)})
        # process light particles first so their covered BF is already final
        # when they appear as daughters of heavier particles
        particles = sorted(particles, key=lambda x: pdg.get(x).Mass())
        inclusive_branching_fractions = copy.deepcopy(exclusive_branching_fractions)

        for p in particles:
            if p in inclusive_branching_fractions:
                br = sum(inclusive_branching_fractions[p].values())
            else:
                br = sum(inclusive_branching_fractions[pdg.conjugate(p)].values())
            # multiply every channel by br once per occurrence of p among its daughters
            for p_br in inclusive_branching_fractions.values():
                for c in p_br:
                    for i in range(c.count(p)):
                        p_br[c] *= br
        return inclusive_branching_fractions
543
544
class MonitoringParticle:
    """
    Monitoring object containing all the monitoring information
    about a single particle
    """

    def __init__(self, particle):
        """
        Read the monitoring information of the given particle
        @param particle the particle for which the information is read
        """
        #: particle whose training/application is monitored
        self.particle = particle
        #: MC counts read from mcParticlesCount.root
        self.mc_count = MonitoringMCCount(particle)
        #: module statistics (time consumption)
        self.module_statistic = MonitoringModuleStatistics(particle)
        #: time spent per channel
        self.time_per_channel = self.module_statistic.channel_time
        #: time spent per channel, split by module type
        self.time_per_channel_per_module = self.module_statistic.channel_time_per_module
        #: total time: particle-level modules plus all channel times
        self.total_time = self.module_statistic.particle_time + sum(self.time_per_channel.values())

        #: number of configured channels
        self.total_number_of_channels = len(self.particle.channels)
        #: number of channels that actually produced signal candidates
        self.reconstructed_number_of_channels = 0

        #: branching fractions extracted from the decay file (cached globally)
        self.branching_fractions = MonitoringBranchingFractions()
        #: exclusive branching fraction per channel
        self.exc_br_per_channel = self.branching_fractions.getExclusive(particle)
        #: inclusive branching fraction per channel
        self.inc_br_per_channel = self.branching_fractions.getInclusive(particle)

        #: Statistic per channel before the best-candidate ranking cut
        self.before_ranking = {}
        #: Statistic per channel after the best-candidate ranking cut
        self.after_ranking = {}
        #: Statistic per channel after the vertex fit
        self.after_vertex = {}
        #: Statistic per channel after the MVA classifier was applied
        self.after_classifier = {}
        #: training-data histograms per channel
        self.training_data = {}
        #: flags channels that were ignored because they produced no signal
        self.ignored_channels = {}

        for channel in self.particle.channels:
            hist = MonitoringHist('Monitor_PreReconstruction_BeforeRanking.root', f'{channel.label}')
            self.before_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterRanking.root', f'{channel.label}')
            self.after_ranking[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PreReconstruction_AfterVertex.root', f'{channel.label}')
            self.after_vertex[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            hist = MonitoringHist('Monitor_PostReconstruction_AfterMVA.root', f'{channel.label}')
            self.after_classifier[channel.label] = self.calculateStatistic(hist, channel.mvaConfig.target)
            # a channel counts as reconstructed if its target histogram has entries
            if hist.valid and hist.sum(channel.mvaConfig.target) > 0:
                self.reconstructed_number_of_channels += 1
                self.ignored_channels[channel.label] = False
            else:
                self.ignored_channels[channel.label] = True
            hist = MonitoringHist('Monitor_TrainingData.root', f'{channel.label}')
            self.training_data[channel.label] = hist

        # plots/histograms are stored per particle list; slashes (e.g. 'J/psi') are stripped
        plist = removeJPsiSlash(particle.identifier)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforePostCut.root', f'{plist}')
        #: Statistic before the post cut
        self.before_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_BeforeRanking.root', f'{plist}')
        #: Statistic before the ranking post cut
        self.before_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        hist = MonitoringHist('Monitor_PostReconstruction_AfterRanking.root', f'{plist}')
        #: Statistic after the ranking post cut
        self.after_ranking_postcut = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        # NOTE(review): before_tag and after_tag are both derived from the
        # AfterRanking histogram read just above -- presumably intentional since
        # tagging happens after ranking, but verify against the training output.
        #: Statistic before unique tagging
        self.before_tag = self.calculateStatistic(hist, self.particle.mvaConfig.target)
        #: Statistic counting only unique signal candidates as signal
        self.after_tag = self.calculateUniqueStatistic(hist)
        #: ntuple with the final candidates
        self.final_ntuple = MonitoringNTuple('Monitor_Final.root', f'{plist}')

    def calculateStatistic(self, hist, target):
        """
        Calculate Statistic object where all signal candidates are considered signal
        @param hist MonitoringHist containing the target histogram
        @param target name of the target variable; bins with center > 0.5 count as signal
        """
        nTrueSig = self.mc_count['sum']
        if not hist.valid:
            return Statistic(nTrueSig, 0, 0)
        signal_bins = (hist.centers[target] > 0.5)
        bckgrd_bins = ~signal_bins
        nSig = hist.values[target][signal_bins].sum()
        nBg = hist.values[target][bckgrd_bins].sum()
        return Statistic(nTrueSig, nSig, nBg)

    def calculateUniqueStatistic(self, hist):
        """
        Calculate Static object where only unique signal candidates are considered signal
        @param hist MonitoringHist containing the extraInfo(uniqueSignal) histogram
        """
        nTrueSig = self.mc_count['sum']
        if not hist.valid:
            return Statistic(nTrueSig, 0, 0)
        signal_bins = hist.centers['extraInfo(uniqueSignal)'] > 0.5
        bckgrd_bins = hist.centers['extraInfo(uniqueSignal)'] <= 0.5
        nSig = hist.values['extraInfo(uniqueSignal)'][signal_bins].sum()
        nBg = hist.values['extraInfo(uniqueSignal)'][bckgrd_bins].sum()
        return Statistic(nTrueSig, nSig, nBg)
653
654# @endcond
655
STL namespace.
def from_name(name)
Definition: pdg.py:63
def conjugate(name)
Definition: pdg.py:111
def get(name)
Definition: pdg.py:48