Belle II Software prerelease-10-00-00a
GRLNeuroModule.cc
/**************************************************************************
 * basf2 (Belle II Analysis Software Framework)                           *
 * Author: The Belle II Collaboration                                     *
 *                                                                        *
 * See git log for contributors and copyright holders.                    *
 * This file is licensed under LGPL-3.0, see LICENSE.md.                  *
 **************************************************************************/


#include <framework/datastore/StoreArray.h>
#include <framework/datastore/StoreObjPtr.h>
#include <framework/dataobjects/EventMetaData.h>
#include <framework/core/ModuleParam.templateDetails.h>
#include <framework/gearbox/Unit.h>
#include <framework/utilities/FileSystem.h>
#include <analysis/utility/PCmsLabTransform.h>
#include <trg/ecl/TrgEclMapping.h>
#include <trg/ecl/dataobjects/TRGECLCluster.h>
#include <trg/grl/dataobjects/GRLMLPData.h>
#include <trg/grl/dataobjects/TRGGRLInfo.h>
#include "trg/grl/dataobjects/TRGGRLUnpackerStore.h"
#include "trg/grl/modules/trggrlneuralnet/GRLNeuroModule.h"

#include <fstream>
#include <cmath>
#include <TFile.h>

using namespace Belle2;
using namespace std;

//-----------------------------------------------------------------
// Register the Module
//-----------------------------------------------------------------
REG_MODULE(GRLNeuro);

//-----------------------------------------------------------------
// Implementation
//-----------------------------------------------------------------
//
GRLNeuroModule::GRLNeuroModule() : Module()
{
  setDescription(
    "The NeuroTrigger module of the GRL.\n"
    "Takes CDC tracks and ECL clusters to prepare input data\n"
    "for the training of a neural network.\n"
    "Networks are trained after the event loop and saved."
  );
  setPropertyFlags(c_ParallelProcessingCertified);
  // parameters for saving / loading
  addParam("TrgGrlInformation", m_TrgGrlInformationName,
           "Name of the StoreArray holding the information of tracks and clusters from the CDC, ECL, and KLM.",
           string("TRGGRLObjects"));
  addParam("HistFileName", m_HistFileName,
           "Name of the ROOT file in which the output histograms are saved.",
           string("hist.root"));
  addParam("SaveHist", m_saveHist,
           "Save the output histograms to a ROOT file.",
           false);
  addParam("nMLP", m_parameters.nMLP,
           "Number of expert MLPs.", m_parameters.nMLP);
  addParam("n_cdc_sector", m_parameters.n_cdc_sector,
           "Number of expert CDC MLPs.", m_parameters.n_cdc_sector);
  addParam("n_ecl_sector", m_parameters.n_ecl_sector,
           "Number of expert ECL MLPs.", m_parameters.n_ecl_sector);
  addParam("i_cdc_sector", m_parameters.i_cdc_sector,
           "Number of CDC tracks for each expert MLP.", m_parameters.i_cdc_sector);
  addParam("i_ecl_sector", m_parameters.i_ecl_sector,
           "Number of ECL clusters for each expert MLP.", m_parameters.i_ecl_sector);
  addParam("nHidden", m_parameters.nHidden,
           "Number of nodes in each hidden layer for all networks "
           "or a factor to multiply by the number of inputs (1 list or nMLP lists). "
           "The number of layers is derived from the shape.",
           m_parameters.nHidden);
  addParam("multiplyHidden", m_parameters.multiplyHidden,
           "If true, multiply nHidden by the number of input nodes.",
           m_parameters.multiplyHidden);
  addParam("outputScale", m_parameters.outputScale,
           "Output scale for all networks (1 value list or nMLP value lists). "
           "Output[i] of the MLP is scaled from [-1, 1] "
           "to [outputScale[2*i], outputScale[2*i+1]]. "
           "(units: z[cm] / theta[degree])",
           m_parameters.outputScale);
  addParam("weightFiles", m_weightFileNames,
           "Name of the file where the weights of the MLPs are saved. "
           "The default file is $BELLE2_LOCAL_DIR/data/trg/grl/weights.dat.",
           {FileSystem::findFile("/data/trg/grl/weights.dat", true)});
  addParam("biasFiles", m_biasFileNames,
           "Name of the file where the biases of the MLPs are saved. "
           "The default file is $BELLE2_LOCAL_DIR/data/trg/grl/bias.dat.",
           {FileSystem::findFile("/data/trg/grl/bias.dat", true)});
  addParam("TRGECLClusters", m_TrgECLClusterName,
           "Name of the StoreArray holding the information of the trigger ECL clusters.",
           string("TRGECLClusters"));
  addParam("MVACut", m_nn_thres,
           "Cut value applied to the MLP output.",
           {-1});
  addParam("useDB", m_useDB,
           "Flag to use the database to set the configuration.", true);
}


void
GRLNeuroModule::initialize()
{

  TrgEclMapping* trgecl_obj = new TrgEclMapping();
  for (int tc = 1; tc <= 576; tc++) {
    TCThetaID.push_back(trgecl_obj->getTCThetaIdFromTCId(tc));
  }
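  // TCThetaID now caches the theta ID of each of the 576 trigger cells (TC 1..576)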

  //-----------------------------------------------------------------------------------------
  //..ECL look up tables
  PCmsLabTransform boostrotate;
  for (int tc = 1; tc <= 576; tc++) {

    //..Four vector of a 1 GeV lab photon at this TC
    ROOT::Math::XYZVector CellPosition = trgecl_obj->getTCPosition(tc);
    ROOT::Math::PxPyPzEVector CellLab;
    CellLab.SetPx(CellPosition.Unit().X());
    CellLab.SetPy(CellPosition.Unit().Y());
    CellLab.SetPz(CellPosition.Unit().Z());
    CellLab.SetE(1.);

    TCThetaLab.push_back(CellLab.Theta() * TMath::RadToDeg());
    TCPhiLab.push_back(CellLab.Phi() * TMath::RadToDeg() + 180.0);
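    // the +180 shift moves phi from [-180, 180) to [0, 360) deg, matching the quantization range used in event()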

  }

  delete trgecl_obj;

}

void
GRLNeuroModule::beginRun()
{
  if (m_useDB) {
    if (not m_db_trggrlconfig.isValid()) {
      StoreObjPtr<EventMetaData> evtMetaData;
      B2FATAL("No database for TRG GRL config. exp " << evtMetaData->getExperiment() << " run "
              << evtMetaData->getRun());
    } else {
      m_parameters.nMLP = m_db_trggrlconfig->get_ecltaunn_nMLP();
      m_parameters.multiplyHidden = m_db_trggrlconfig->get_ecltaunn_multiplyHidden();
      m_parameters.nHidden = m_db_trggrlconfig->get_ecltaunn_nHidden();
      m_parameters.n_cdc_sector = m_db_trggrlconfig->get_ecltaunn_n_cdc_sector();
      m_parameters.n_ecl_sector = m_db_trggrlconfig->get_ecltaunn_n_ecl_sector();
      m_parameters.i_cdc_sector = m_db_trggrlconfig->get_ecltaunn_i_cdc_sector();
      m_parameters.i_ecl_sector = m_db_trggrlconfig->get_ecltaunn_i_ecl_sector();
      m_nn_thres[0] = m_db_trggrlconfig->get_ecltaunn_threshold();
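      // values from the TRGGRLConfig payload override the corresponding module parameters and the MVACut threshold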

      m_GRLNeuro.initialize(m_parameters);

      for (unsigned int isector = 0; isector < m_parameters.nMLP; isector++) {
        if (!m_GRLNeuro.load(isector, m_db_trggrlconfig->get_ecltaunn_weight()[isector], m_db_trggrlconfig->get_ecltaunn_bias()[isector])) {
          B2ERROR("weight of GRL ecltaunn could not be loaded correctly.");
        }
      }
    }
  }

  if (m_saveHist) {
    for (unsigned int isector = 0; isector < m_parameters.nMLP; isector++) {
      h_target.push_back(new TH1D(("h_target_" + to_string(isector)).c_str(),
                                  ("h_target_" + to_string(isector)).c_str(), 100, 0.0, 1.0));
    }
  }
}

void
GRLNeuroModule::event()
{

  //ECL input
  //..Use only clusters within 100 ns of event timing (from ECL).
  //StoreArray<TRGECLTrg> trgArray;
  StoreArray<TRGECLCluster> eclTrgClusterArray(m_TrgECLClusterName);
  StoreObjPtr<TRGGRLInfo> trgInfo(m_TrgGrlInformationName);

  std::vector<std::tuple<float, float, float, float>> eclClusters;

  int necl = eclTrgClusterArray.getEntries();
  for (int ic = 0; ic < necl; ic++) {
    int TC = eclTrgClusterArray[ic]->getMaxTCId();
    float energy = eclTrgClusterArray[ic]->getEnergyDep() * 1000.0;
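    // the factor 1000 presumably converts the deposited energy from GeV (basf2 standard unit) to MeV, matching the 5.5 MeV LSB used below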
    float theta = TCThetaLab[TC - 1];
    float phi = TCPhiLab[TC - 1];
    float time = eclTrgClusterArray[ic]->getTimeAve();
    eclClusters.emplace_back(energy, theta, phi, time);
  }

  std::sort(eclClusters.begin(), eclClusters.end(),
            [](const std::tuple<float, float, float, float>& a,
               const std::tuple<float, float, float, float>& b) {
    return std::get<0>(a) > std::get<0>(b);
  });
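  // clusters are now ordered by decreasing energy; only the six most energetic ones feed the network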

  // MLPinput
  std::vector<float> MLPinput;
  MLPinput.clear();
  MLPinput.assign(24, 0);

  for (size_t i = 0; i < std::min(eclClusters.size(), size_t(6)); i++) {
    MLPinput[i] = std::get<0>(eclClusters[i]);      // E
    MLPinput[6 + i] = std::get<1>(eclClusters[i]);  // Theta
    MLPinput[12 + i] = std::get<2>(eclClusters[i]); // Phi
    MLPinput[18 + i] = std::get<3>(eclClusters[i]); // time
  }
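  // MLPinput layout: [0..5] = E, [6..11] = theta, [12..17] = phi, [18..23] = time of the up-to-six leading clusters; unused slots stay 0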



  // //////////////////////////////////////////////////////////////////////////////////////////////////////////

  //Energy, theta and phi data are then quantized.
  //Energy: LSB = ADC count (5.5 MeV)
  //theta : 0 ~ 180 deg, LSB = 1.6025 deg
  //phi   : 0 ~ 360 deg, LSB = 1.6025 deg
  float LSB_ADC = 1 / 5.5;
  float LSB_angle = 1 / 1.6025;

  std::for_each(MLPinput.begin() + 0, MLPinput.begin() + 6, [LSB_ADC](float & x) { x = std::ceil(x * LSB_ADC); });
  std::for_each(MLPinput.begin() + 6, MLPinput.begin() + 12, [LSB_angle](float & x) { x = std::ceil(x * LSB_angle); });
  std::for_each(MLPinput.begin() + 12, MLPinput.begin() + 18, [LSB_angle](float & x) { x = std::ceil(x * LSB_angle); });
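  // e.g. a 100 MeV cluster maps to ceil(100 / 5.5) = 19 counts, and a 45 deg angle to ceil(45 / 1.6025) = 29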

  // newly added (currently disabled): quantize and clamp the cluster times to [0, 255]
  // float LSB_time = 1.0f;
  // std::for_each(MLPinput.begin() + 18, MLPinput.begin() + 24, [](float &x) {
  //   x = std::ceil(x);
  //   x = std::min(x, 255.0f);
  //   x = std::max(x, 0.0f);
  // });


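  // evaluate expert network 0 on the packed, quantized input; only the first sector and threshold are used in this event logic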
  float target = m_GRLNeuro.runMLP(0, MLPinput);
  if (m_saveHist) {
    h_target[0]->Fill(target);
  }
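  // flag the event as tau-like when the network output exceeds the configured MVACut threshold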
  if (target > m_nn_thres[0]) {
    trgInfo->setTauNN(true);
  } else trgInfo->setTauNN(false);

}

void
GRLNeuroModule::terminate()
{
  if (m_saveHist) {
    TFile* file = new TFile(TString(m_HistFileName), "recreate");
    for (unsigned int isector = 0; isector < m_parameters.nMLP; isector++) {
      h_target[isector]->Write();
    }
    file->Write();
    file->Close();
  }

}