Belle II Software release-06-01-15
plotPXDPositionEstimator.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Plot PXD position estimator payload
"""

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns

import basf2 as b2
from ROOT import Belle2


class PlotClusterPositionEstimatorPayload(b2.Module):
    """Plot the PXDClusterPositionEstimator payload"""

    def __init__(self, resultdir):
        """Initialize"""
        super().__init__()  # don't forget to call parent constructor

        #: Payload with the PXD cluster position estimator parameters
        self.position_estimator = Belle2.PyDBObj('PXDClusterPositionEstimatorPar')
        #: Directory to put all plots
        self.resultdir = resultdir
        #: Event counter
        self.counter = 0

    def event(self):
        """Plot position payload in case it has changed"""

        # We plot the payload whenever it changes
        if self.position_estimator.hasChanged():
            b2.B2INFO("PXDClusterPositionEstimator payload has changed. Plot the new payload.")
            self.plot()
        self.counter += 1

    def average_covariance(self, shape_classifier):
        """Compute the average covariance for a shape classifier"""
        flat_covs = []
        weights = []

        offsetMap = shape_classifier.getOffsetMap()
        likelyhoodMap = shape_classifier.getLikelyhoodMap()

        for item in offsetMap:
            shape_index = item.first
            offsets = item.second

            for eta_index, offset in enumerate(offsets):
                likelyhood = likelyhoodMap[shape_index][eta_index]
                flat_covs.append([offset.getUSigma2(), offset.getVSigma2(), offset.getUVCovariance()])
                weights.append(likelyhood)

        weights = np.array(weights)
        flat_covs = np.array(flat_covs)
        flat_average = np.average(flat_covs, axis=0, weights=weights)
        return np.array([[flat_average[0], flat_average[2]], [flat_average[2], flat_average[1]]])

    def plot(self):
        """Plot position estimator payload"""
        for pair in self.position_estimator.getGridMap():
            pixelkind = pair.first
            grid = pair.second

            # Keep some numbers to gain overview
            summary_dict = {'shapes': [],
                            'corrections': [],
                            'coverage': [],
                            'sigma_u': [],
                            'sigma_v': [],
                            'corr_uv': [],
                            'thetaU': [],
                            'thetaV': [],
                            'pixelkind': [],
                            }

            # Loop over angular grid and plot shape classifiers
            for uBin in range(1, grid.GetXaxis().GetNbins() + 1):
                for vBin in range(1, grid.GetYaxis().GetNbins() + 1):

                    # Shape classifier for angle bin
                    shape_classifier = self.position_estimator.getShapeClassifier(uBin, vBin, pixelkind)

                    # Bin is centered around angles
                    thetaU = grid.GetXaxis().GetBinCenter(uBin)
                    thetaV = grid.GetYaxis().GetBinCenter(vBin)

                    # Create plots for classifier
                    stats = self.get_classifier_stats(shape_classifier, pixelkind)

                    # Fill summary dict
                    summary_dict['thetaU'].append(thetaU)
                    summary_dict['thetaV'].append(thetaV)
                    summary_dict['pixelkind'].append(pixelkind)
                    summary_dict['shapes'].append(stats['shapes'])
                    summary_dict['corrections'].append(stats['corrections'])
                    summary_dict['coverage'].append(stats['coverage'])
                    summary_dict['sigma_u'].append(stats['sigma_u'] * 10000)  # cm -> um
                    summary_dict['sigma_v'].append(stats['sigma_v'] * 10000)  # cm -> um
                    summary_dict['corr_uv'].append(stats['corr_uv'])

            # Create heatmaps to gain overview
            df = pd.DataFrame(summary_dict)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='corrections')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Number of corrections kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt="d",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': '#corrections'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/Corrections_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='shapes')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Number of shapes kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt="d",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': '#shapes'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/Shapes_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='coverage')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Coverage kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt=".1f",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': 'coverage / %'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/Coverage_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='sigma_u')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Average cluster sigma u kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt=".1f",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': 'sigma u / um'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/SigmaU_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='sigma_v')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Average cluster sigma v kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt=".1f",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': 'sigma v / um'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/SigmaV_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

            pivot_table = df.pivot(index='thetaU', columns='thetaV', values='corr_uv')
            fig = plt.figure(figsize=(12, 12))
            ax = fig.add_subplot(111)
            ax.set_xlabel('thetaU / degree', size=20)
            ax.set_ylabel('thetaV / degree', size=20)
            ax.set_title('Average uv correlation kind={:d}'.format(pixelkind), size=20)
            ax = sns.heatmap(
                pivot_table,
                mask=pivot_table.isnull(),
                annot=True,
                fmt=".1f",
                linewidths=.5,
                square=True,
                cmap='Blues_r',
                cbar_kws={
                    'label': 'correlation'})
            ax.invert_yaxis()
            fig.savefig(self.resultdir + '/CorrelationUV_Heatmap_kind_{:d}.png'.format(pixelkind), dpi=100)
            fig.clf()
            plt.close(fig)

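    # --- Illustration only, not part of the original payload code ---
    # The six heatmap blocks in plot() differ only in the plotted column, title,
    # annotation format, colorbar label and file name. A hypothetical helper like
    # the sketch below (all names are made up) could remove that repetition;
    # plot() would then call it once per observable.
    def _plot_heatmap_sketch(self, df, column, title, fmt, cbar_label, fname, pixelkind):
        """Sketch: pivot one summary column onto the (thetaU, thetaV) grid and save a heatmap"""
        pivot_table = df.pivot(index='thetaU', columns='thetaV', values=column)
        fig = plt.figure(figsize=(12, 12))
        ax = fig.add_subplot(111)
        ax.set_xlabel('thetaU / degree', size=20)
        ax.set_ylabel('thetaV / degree', size=20)
        ax.set_title('{} kind={:d}'.format(title, pixelkind), size=20)
        ax = sns.heatmap(pivot_table, mask=pivot_table.isnull(), annot=True, fmt=fmt,
                         linewidths=.5, square=True, cmap='Blues_r',
                         cbar_kws={'label': cbar_label})
        ax.invert_yaxis()
        fig.savefig(self.resultdir + '/' + fname.format(pixelkind), dpi=100)
        fig.clf()
        plt.close(fig)
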
    def get_classifier_stats(self, shape_classifier, pixelkind):
        """Compute some statistics for a shape classifier"""
        # Read corrections data
        offsetMap = shape_classifier.getOffsetMap()
        likelyhoodMap = shape_classifier.getLikelyhoodMap()

        # Some counters
        nCorrections = 0
        nShapes = 0
        coverage = 0.0

        for item in offsetMap:
            shape_index = item.first
            offsets_array = item.second

            nShapes += 1

            for eta_index, offset in enumerate(offsets_array):
                coverage += likelyhoodMap[shape_index][eta_index]
                nCorrections += 1

        if nShapes > 0:
            # Average covariance of all hits in classifier
            cov = self.average_covariance(shape_classifier)
            sigma_u = np.sqrt(cov[0, 0])
            sigma_v = np.sqrt(cov[1, 1])
            corr_uv = cov[0, 1] / (np.sqrt(cov[0, 0]) * np.sqrt(cov[1, 1]))
        else:
            sigma_u = 0.0
            sigma_v = 0.0
            corr_uv = 0.0

        # Create some summary statistics
        stats = {}
        stats['shapes'] = nShapes
        stats['corrections'] = nCorrections
        stats['coverage'] = 100 * coverage
        stats['sigma_u'] = sigma_u
        stats['sigma_v'] = sigma_v
        stats['corr_uv'] = corr_uv

        # Return some statistics
        return stats


if __name__ == "__main__":

    import argparse
    import os
    import shutil
    parser = argparse.ArgumentParser(description="Plot summary of hit estimator")
    parser.add_argument('--resultdir', default="results", type=str, help='Put all plots in this directory')
    args = parser.parse_args()

    # Remove old results
    if os.path.isdir(os.getcwd() + '/' + args.resultdir):
        shutil.rmtree(os.getcwd() + '/' + args.resultdir)

    # Create a clean folder for results
    os.mkdir(os.getcwd() + '/' + args.resultdir)

    # Create a path that processes one event and plots the payload
    main = b2.create_path()
    main.add_module("EventInfoSetter", evtNumList=[1])
    main.add_module(PlotClusterPositionEstimatorPayload(args.resultdir))
    main.add_module("Progress")

    b2.process(main)
    print(b2.statistics)
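Usage sketch (not part of the original listing): assuming a configured basf2 environment whose conditions database provides the PXDClusterPositionEstimatorPar payload, the script can be run as a steering file, for example

  basf2 plotPXDPositionEstimator.py -- --resultdir results

The heatmap PNG files are then written into the chosen result directory.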