# b2pandas_utils.py -- Belle II Software (light-2205-abys)

import basf2
import variables
import tables
import numpy as np
import warnings


"""
Python utilities to help create or manage ntuples and work with them in pandas
"""

class VariablesToHDF5(basf2.Module):
    """
    Dump variables directly to HDF5

    This Module is the equivalent of VariablesToNtuple but creates an hdf5 file
    instead of a root file. It is slower as it is implemented in pure python and
    should currently be considered a proof of concept.
    """

    def __init__(self, listname, variables, filename):
        """Constructor to initialize the internal state

        Arguments:
            listname(str): name of the particle list
            variables(list(str)): list of variables to save for each particle
            filename(str): name of the hdf5 file to be created
        """
        super().__init__()
        #: Output filename
        self._filename = filename
        #: Particle list name.
        self._listname = listname
        #: List of variables to save for each particle
        self._variables = variables

    def initialize(self):
        """Create the hdf5 file and list of variable objects to be used during
        event processing."""
        import ROOT  # noqa

        #: Resolved variable names
        self._varnames = [
            str(varname) for varname in variables.variables.resolveCollections(
                variables.std_vector(*self._variables))]
        #: variable objects for each variable
        self._var_objects = [variables.variables.getVariable(n) for n in self._varnames]

        #: Event metadata store object
        self._evtmeta = ROOT.Belle2.PyStoreObj("EventMetaData")
        self._evtmeta.isRequired()
        #: Pointer to the particle list.
        self._plist = ROOT.Belle2.PyStoreObj(self._listname)
        self._plist.isRequired()

        #: The output hdf5 file
        self._hdf5file = tables.open_file(self._filename, mode="w", title="Belle2 Variables to HDF5")
        if not self._hdf5file:
            basf2.B2ERROR("Cannot create output file")
            return

        dtype = [("exp", np.int32), ("run", np.int32), ("evt", np.uint32),
                 ("prod", np.uint32), ("icand", np.uint32), ("ncand", np.uint32)]
        for name in self._varnames:
            # only float variables for now
            dtype.append((name, np.float64))

        #: Row dtype of the output table
        self._dtype = dtype
        filters = tables.Filters(complevel=1, complib='blosc:lz4', fletcher32=False)
        # some variable names are not just A-Za-z0-9 so pytables complains but
        # seems to work. Ignore warning
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            #: The pytables table rows are appended to
            self._table = self._hdf5file.create_table("/", self._listname, obj=np.zeros(0, dtype), filters=filters)

    def event(self):
        """Create a new row in the hdf5 file for each particle in the list"""
        buf = np.empty(self._plist.getListSize(), dtype=self._dtype)
        # add some extra columns for bookkeeping
        buf["exp"] = self._evtmeta.getExperiment()
        buf["run"] = self._evtmeta.getRun()
        buf["evt"] = self._evtmeta.getEvent()
        buf["prod"] = self._evtmeta.getProduction()
        buf["ncand"] = len(buf)
        buf["icand"] = np.arange(len(buf))

        for row, p in zip(buf, self._plist):
            for name, v in zip(self._varnames, self._var_objects):
                # pyroot proxy not working with callables, we should fix this.
                # For now we need to go back by name and call it.
                # should be `row[v.name] = v.func(p)`
                row[name] = variables.variables.evaluate(v.name, p)

        self._table.append(buf)

    def terminate(self):
        """Save and close the output file"""
        self._table.flush()
        self._hdf5file.close()


def make_mcerrors_readable(dataframe, column="mcErrors"):
    """
    Take a dataframe containing a column with the output of the :b2:var:`mcErrors`
    variable from :b2:mod:`VariablesToNtuple` and convert it to a readable set
    of columns of the form ``{column}_{name}`` where column is the value of the
    ``column`` argument and ``name`` is one of the :ref:`mcmatching`
    error flags (without the leading 'c_').

    Arguments:
        dataframe(pandas.DataFrame): the pandas dataframe containing an ntuple
            with a column containing the output of the mcErrors variable
        column(str): the name of the column containing the values from the mcErrors variable
    """
    import ROOT  # noqa

    if column not in dataframe:
        raise KeyError(f"Cannot find column '{column}'")

    # convert mcErrors to int to be able to logically operate on it
    mcErrors = dataframe[column].astype(int)

    # and loop over all the c_ constants in the Belle2.MCMatching class
    for flag in (e for e in dir(ROOT.Belle2.MCMatching) if e.startswith("c_")):
        try:
            value = int(getattr(ROOT.Belle2.MCMatching, flag))
        except ValueError:
            # probably the extraInfo column name, ignore
            continue

        # and set the column
        name = column + flag[1:]
        if value == 0:
            dataframe[name] = mcErrors == 0
        else:
            dataframe[name] = (mcErrors & value) == value

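# Hedged usage sketch for make_mcerrors_readable (comments only, not run on
# import). It assumes a DataFrame that already contains an ``mcErrors`` column,
# e.g. an ntuple loaded into pandas; the resulting column names depend on the
# flag constants defined in Belle2.MCMatching (for instance a ``c_Correct``
# constant would yield ``mcErrors_Correct``):
#
#   df = pandas.DataFrame({"mcErrors": [0, 1, 8]})
#   make_mcerrors_readable(df)
#   correctly_matched = df[df["mcErrors_Correct"]]
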
# This is just for testing, no need for doxygen to weirdly document it
# @cond
if __name__ == "__main__":
    import modularAnalysis

    p = basf2.create_path()
    p.add_module("EventInfoSetter", evtNumList=100)
    p.add_module("EvtGenInput")
    modularAnalysis.fillParticleListsFromMC([("pi-:gen", "")], path=p)
    a = VariablesToHDF5("pi-:gen", ["M", "E", "px", "py", "pz"], "test.hdf5")
    p.add_module(a)
    # Process the events
    basf2.process(p)
    print(basf2.statistics)
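    # Hedged readback sketch, assuming the file and particle list written
    # above: the output is a plain pytables table, so open it with pytables
    # and wrap the structured array in a pandas DataFrame. The node name is
    # the particle list name, which is not a valid Python identifier, hence
    # get_node instead of natural naming.
    import pandas as pd
    with tables.open_file("test.hdf5", mode="r") as hdf5file:
        ntuple = hdf5file.get_node("/", "pi-:gen")
        df = pd.DataFrame(ntuple.read())
    print(df.head())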
# @endcond