# Belle II Software  release-08-01-10
# plotPXDPositionEstimator.py
1 #!/usr/bin/env python3
2 
3 
10 
11 """
12 Plot PXD position estimator payload
13 """
14 
15 import numpy as np
16 import matplotlib.pyplot as plt
17 import pandas as pd
18 import seaborn as sns
19 
20 import basf2 as b2
21 from ROOT import Belle2
22 
23 
25  """Plot the PXDClusterPositionEstimator playload """
26 
27  def __init__(self, resultdir):
28  """Initialize"""
29  super().__init__() # don't forget to call parent constructor
30 
31  self.position_estimatorposition_estimator = Belle2.PyDBObj('PXDClusterPositionEstimatorPar')
32 
33  self.resultdirresultdir = resultdir
34 
35  self.countercounter = 0
36 
37  def event(self):
38  """Plot position payload in case it has changed"""
39 
40  # We plot the payload whenever it changes
41  if self.position_estimatorposition_estimator.hasChanged():
42  b2.B2INFO("PXDClusterPositionEstimator payload has changed. Plot the new payload.")
43  self.plotplot()
44  self.countercounter += 1
45 
46  def average_covariance(self, shape_classifier):
47  """Compute the average covariance for a shape classifier"""
48  flat_covs = []
49  weights = []
50 
51  offsetMap = shape_classifier.getOffsetMap()
52  likelyhoodMap = shape_classifier.getLikelyhoodMap()
53 
54  for item in offsetMap:
55  shape_index = item.first
56  offsets = item.second
57 
58  for eta_index, offset in enumerate(offsets):
59  likelyhood = likelyhoodMap[shape_index][eta_index]
60  flat_covs.append([offset.getUSigma2(), offset.getVSigma2(), offset.getUVCovariance()])
61  weights.append(likelyhood)
62 
63  weights = np.array(weights)
64  flat_covs = np.array(flat_covs)
65  flat_average = np.average(flat_covs, axis=0, weights=weights)
66  return np.array([[flat_average[0], flat_average[2]], [flat_average[2], flat_average[1]]])
67 
68  def plot(self):
69  """Plot position estimator payload"""
70  for pair in self.position_estimatorposition_estimator.getGridMap():
71  pixelkind = pair.first
72  grid = pair.second
73 
74  # Keep some numbers to gain overview
75  summary_dict = {'shapes': [],
76  'corrections': [],
77  'coverage': [],
78  'sigma_u': [],
79  'sigma_v': [],
80  'corr_uv': [],
81  'thetaU': [],
82  'thetaV': [],
83  'pixelkind': [],
84  }
85 
86  # Loop over angular grid and plot shape classifiers
87  for uBin in range(1, grid.GetXaxis().GetNbins() + 1):
88  for vBin in range(1, grid.GetYaxis().GetNbins() + 1):
89 
90  # Shape classifier for angle bin
91  shape_classifier = self.position_estimatorposition_estimator.getShapeClassifier(uBin, vBin, pixelkind)
92 
93  # Bin is centered around angles
94  thetaU = grid.GetXaxis().GetBinCenter(uBin)
95  thetaV = grid.GetYaxis().GetBinCenter(vBin)
96 
97  # Create plots for classifier
98  stats = self.get_classifier_statsget_classifier_stats(shape_classifier, pixelkind)
99 
100  # Fill summary dict
101  summary_dict['thetaU'].append(thetaU)
102  summary_dict['thetaV'].append(thetaV)
103  summary_dict['pixelkind'].append(pixelkind)
104  summary_dict['shapes'].append(stats['shapes'])
105  summary_dict['corrections'].append(stats['corrections'])
106  summary_dict['coverage'].append(stats['coverage'])
107  summary_dict['sigma_u'].append(stats['sigma_u'] * 10000) # cm -> um
108  summary_dict['sigma_v'].append(stats['sigma_v'] * 10000) # cm -> um
109  summary_dict['corr_uv'].append(stats['corr_uv'])
110 
111  # Create heatmaps to gain overview
112  df = pd.DataFrame(summary_dict)
113 
114  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='corrections')
115  fig = plt.figure(figsize=(12, 12))
116  ax = fig.add_subplot(111)
117  ax.set_xlabel('thetaU / degree', size=20)
118  ax.set_ylabel('thetaV / degree', size=20)
119  ax.set_title(f'Number of corrections kind={pixelkind:d}', size=20)
120  ax = sns.heatmap(
121  pivot_table,
122  mask=pivot_table.isnull(),
123  annot=True,
124  fmt="d",
125  linewidths=.5,
126  square=True,
127  cmap='Blues_r',
128  cbar_kws={
129  'label': '#corrections'})
130  ax.invert_yaxis()
131  fig.savefig(self.resultdirresultdir + f'/Corrections_Heatmap_kind_{pixelkind:d}.png', dpi=100)
132  fig.clf()
133  plt.close(fig)
134 
135  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='shapes')
136  fig = plt.figure(figsize=(12, 12))
137  ax = fig.add_subplot(111)
138  ax.set_xlabel('thetaU / degree', size=20)
139  ax.set_ylabel('thetaV / degree', size=20)
140  ax.set_title(f'Number of shapes kind={pixelkind:d}', size=20)
141  ax = sns.heatmap(
142  pivot_table,
143  mask=pivot_table.isnull(),
144  annot=True,
145  fmt="d",
146  linewidths=.5,
147  square=True,
148  cmap='Blues_r',
149  cbar_kws={
150  'label': '#shapes'})
151  ax.invert_yaxis()
152  fig.savefig(self.resultdirresultdir + f'/Shapes_Heatmap_kind_{pixelkind:d}.png', dpi=100)
153  fig.clf()
154  plt.close(fig)
155 
156  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='coverage')
157  fig = plt.figure(figsize=(12, 12))
158  ax = fig.add_subplot(111)
159  ax.set_xlabel('thetaU / degree', size=20)
160  ax.set_ylabel('thetaV / degree', size=20)
161  ax.set_title(f'Coverage kind={pixelkind:d}', size=20)
162  ax = sns.heatmap(
163  pivot_table,
164  mask=pivot_table.isnull(),
165  annot=True,
166  fmt=".1f",
167  linewidths=.5,
168  square=True,
169  cmap='Blues_r',
170  cbar_kws={
171  'label': 'coverage / %'})
172  ax.invert_yaxis()
173  fig.savefig(self.resultdirresultdir + f'/Coverage_Heatmap_kind_{pixelkind:d}.png', dpi=100)
174  fig.clf()
175  plt.close(fig)
176 
177  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='sigma_u')
178  fig = plt.figure(figsize=(12, 12))
179  ax = fig.add_subplot(111)
180  ax.set_xlabel('thetaU / degree', size=20)
181  ax.set_ylabel('thetaV / degree', size=20)
182  ax.set_title(f'Average cluster sigma u kind={pixelkind:d}', size=20)
183  ax = sns.heatmap(
184  pivot_table,
185  mask=pivot_table.isnull(),
186  annot=True,
187  fmt=".1f",
188  linewidths=.5,
189  square=True,
190  cmap='Blues_r',
191  cbar_kws={
192  'label': 'sigma u / um'})
193  ax.invert_yaxis()
194  fig.savefig(self.resultdirresultdir + f'/SigmaU_Heatmap_kind_{pixelkind:d}.png', dpi=100)
195  fig.clf()
196  plt.close(fig)
197 
198  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='sigma_v')
199  fig = plt.figure(figsize=(12, 12))
200  ax = fig.add_subplot(111)
201  ax.set_xlabel('thetaU / degree', size=20)
202  ax.set_ylabel('thetaV / degree', size=20)
203  ax.set_title(f'Average cluster sigma v kind={pixelkind:d}', size=20)
204  ax = sns.heatmap(
205  pivot_table,
206  mask=pivot_table.isnull(),
207  annot=True,
208  fmt=".1f",
209  linewidths=.5,
210  square=True,
211  cmap='Blues_r',
212  cbar_kws={
213  'label': 'sigma v / um'})
214  ax.invert_yaxis()
215  fig.savefig(self.resultdirresultdir + f'/SigmaV_Heatmap_kind_{pixelkind:d}.png', dpi=100)
216  fig.clf()
217  plt.close(fig)
218 
219  pivot_table = df.pivot(index='thetaU', columns='thetaV', values='corr_uv')
220  fig = plt.figure(figsize=(12, 12))
221  ax = fig.add_subplot(111)
222  ax.set_xlabel('thetaU / degree', size=20)
223  ax.set_ylabel('thetaV / degree', size=20)
224  ax.set_title(f'Average uv correlation kind={pixelkind:d}', size=20)
225  ax = sns.heatmap(
226  pivot_table,
227  mask=pivot_table.isnull(),
228  annot=True,
229  fmt=".1f",
230  linewidths=.5,
231  square=True,
232  cmap='Blues_r',
233  cbar_kws={
234  'label': 'correlation'})
235  ax.invert_yaxis()
236  fig.savefig(self.resultdirresultdir + f'/CorrelationUV_Heatmap_kind_{pixelkind:d}.png', dpi=100)
237  fig.clf()
238  plt.close(fig)
239 
240  def get_classifier_stats(self, shape_classifier, pixelkind):
241  """Compute some statistics for a shape classifier"""
242  # Read corrections data
243  offsetMap = shape_classifier.getOffsetMap()
244  likelyhoodMap = shape_classifier.getLikelyhoodMap()
245 
246  # Some counters
247  nCorrections = 0
248  nShapes = 0
249  coverage = 0.0
250 
251  for item in offsetMap:
252  shape_index = item.first
253  offsets_array = item.second
254 
255  nShapes += 1
256 
257  for eta_index, offset in enumerate(offsets_array):
258  coverage += likelyhoodMap[shape_index][eta_index]
259  nCorrections += 1
260 
261  if nShapes > 0:
262  # Average covariance of all hits in classifier
263  cov = self.average_covarianceaverage_covariance(shape_classifier)
264  sigma_u = np.sqrt(cov[0, 0])
265  sigma_v = np.sqrt(cov[1, 1])
266  corr_uv = cov[0, 1] / (np.sqrt(cov[0, 0]) * np.sqrt(cov[1, 1]))
267  else:
268  sigma_u = 0.0
269  sigma_v = 0.0
270  corr_uv = 0.0
271 
272  # Create some summary statistics
273  stats = {}
274  stats['shapes'] = nShapes
275  stats['corrections'] = nCorrections
276  stats['coverage'] = 100 * coverage
277  stats['sigma_u'] = sigma_u
278  stats['sigma_v'] = sigma_v
279  stats['corr_uv'] = corr_uv
280 
281  # return some statistics
282  return stats
283 
284 
if __name__ == "__main__":

    import argparse
    import os
    import shutil

    parser = argparse.ArgumentParser(description="Plot summary of hit estimator")
    parser.add_argument('--resultdir', default="results", type=str, help='Put all plots in this directory')
    args = parser.parse_args()

    # Build the path once with os.path.join (also correct when --resultdir is absolute)
    resultdir = os.path.join(os.getcwd(), args.resultdir)

    # Remove old stuff
    if os.path.isdir(resultdir):
        shutil.rmtree(resultdir)

    # Create nice clean folder for results
    os.mkdir(resultdir)

    # Now let's create a path to simulate our events.
    main = b2.create_path()
    main.add_module("EventInfoSetter", evtNumList=[1])
    main.add_module(PlotClusterPositionEstimatorPayload(args.resultdir))
    main.add_module("Progress")

    b2.process(main)
    print(b2.statistics)
# NOTE: PyDBObj is the Python accessor for a DBObjPtr (defined in PyDBObj.h).