SVDTimeGroupingModule.cc
/**************************************************************************
 * basf2 (Belle II Analysis Software Framework)                           *
 * Author: The Belle II Collaboration                                     *
 *                                                                        *
 * See git log for contributors and copyright holders.                    *
 * This file is licensed under LGPL-3.0, see LICENSE.md.                  *
 **************************************************************************/

#include <svd/modules/svdTimeGrouping/SVDTimeGroupingModule.h>

// framework
#include <framework/logging/Logger.h>
#include <framework/utilities/FileSystem.h>

// svd
#include <svd/geometry/SensorInfo.h>
#include <svd/dataobjects/SVDEventInfo.h>

// root
#include <TString.h>

using namespace Belle2;


REG_MODULE(SVDTimeGrouping);

SVDTimeGroupingModule::SVDTimeGroupingModule() :
  Module()
{
  setDescription("Assigns the time-group Id to SVD clusters.");
  setPropertyFlags(c_ParallelProcessingCertified);

  // 1a. Collections.
  addParam("SVDClusters", m_svdClustersName, "SVDCluster collection name", std::string(""));
  addParam("SVDEventInfo", m_svdEventInfoName,
           "SVDEventInfo collection name.", std::string("SVDEventInfo"));

  // 1b. Module Configuration
  addParam("forceGroupingFromDB", m_forceGroupingFromDB,
           "use SVDRecoConfiguration from DB", bool(true));
  addParam("isEnabledIn6Samples", m_isEnabledIn6Samples,
           "if true, module is enabled for 6-sample DAQ mode", bool(false));
  addParam("isEnabledIn3Samples", m_isEnabledIn3Samples,
           "if true, module is enabled for 3-sample DAQ mode", bool(false));
  addParam("useParamFromDB", m_useParamFromDB,
           "use SVDTimeGroupingConfiguration from DB", bool(true));

  // 2. Fill time Histogram:
  addParam("tRangeLow", m_usedPars.tRange[0], "This sets the x- range of histogram [ns].",
           float(-160.));
  addParam("tRangeHigh", m_usedPars.tRange[1], "This sets the x+ range of histogram [ns].",
           float(160.));
  addParam("rebinningFactor", m_usedPars.rebinningFactor,
           "Time bin width is 1/rebinningFactor ns. Disables the module if set zero",
           int(2));
  addParam("fillSigmaN", m_usedPars.fillSigmaN,
           "Number of Gaussian sigmas (= hardcoded resolutions) used to fill the time histogram for each cluster.",
           float(3.));

  // 3. Search peaks:
  addParam("minSigma", m_usedPars.limitSigma[0],
           "Lower limit of cluster time sigma for the fit for the peak-search [ns].",
           float(1.));
  addParam("maxSigma", m_usedPars.limitSigma[1],
           "Upper limit of cluster time sigma for the fit for the peak-search [ns].",
           float(15.));
  addParam("fitRangeHalfWidth", m_usedPars.fitRangeHalfWidth,
           "half width of the range in which the fit for the peak-search is performed [ns].",
           float(5.));
  addParam("removeSigmaN", m_usedPars.removeSigmaN,
           "Evaluate and remove gauss upto N sigma.",
           float(7.));
  addParam("fracThreshold", m_usedPars.fracThreshold,
           "Minimum fraction of candidates in a peak (wrt to the highest peak) considered for fitting in the peak-search.",
           float(0.05));
  addParam("maxGroups", m_usedPars.maxGroups,
           "Maximum number of groups to be accepted.",
           int(20));

  // 4. Sort groups:
  addParam("expectedSignalTimeCenter", m_usedPars.expectedSignalTime[1],
           "Expected time of the signal [ns].",
           float(0.));
  addParam("expectedSignalTimeMin", m_usedPars.expectedSignalTime[0],
           "Expected low range of signal hits [ns].",
           float(-50.));
  addParam("expectedSignalTimeMax", m_usedPars.expectedSignalTime[2],
           "Expected high range of signal hits [ns].",
           float(50.));
  addParam("signalLifetime", m_usedPars.signalLifetime,
           "Group prominence is weighted with exponential weight with a lifetime defined by this parameter [ns].",
           float(30.));

  // 5. Signal group selection:
  addParam("acceptSigmaN", m_usedPars.acceptSigmaN,
           "Accept clusters upto N sigma.",
           float(7.));
  addParam("writeGroupInfo", m_usedPars.writeGroupInfo,
           "Write group info into SVDClusters.",
           bool(true));

  // 6. Handle out-of-range clusters:
  addParam("includeOutOfRangeClusters", m_usedPars.includeOutOfRangeClusters,
           "Assign groups to under and overflow.",
           bool(true));

  // 7. svd time resolution for 3 sensor types and V/U side, w.r.t. clsSize
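  // indexing: clsSigma[sensorType][side][clusterSize - 1]
  //   sensorType -> 0: layer-3, 1: barrel, 2: forward
  //   side       -> 0: V, 1: U (isUCluster is used as the index)
  //   clusters larger than the table reuse the last entry (see createAndFillHistorgram)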
  m_usedPars.clsSigma[0][0] = {2.0417, 2.3606, 2.1915, 1.9810, 1.8042, 1.6205};
  m_usedPars.clsSigma[0][1] = {3.5880, 3.4526, 2.9363, 2.6833, 2.5342, 2.2895};
  m_usedPars.clsSigma[1][0] = {2.1069, 2.0530, 1.9895, 1.8720, 1.6453, 1.5905};
  m_usedPars.clsSigma[1][1] = {3.3919, 2.2280, 2.1177, 2.0852, 1.9968, 1.9914};
  m_usedPars.clsSigma[2][0] = {1.6863, 1.9920, 1.8498, 1.7737, 1.6320, 1.5629};
  m_usedPars.clsSigma[2][1] = {3.2798, 3.2243, 2.9404, 2.7911, 2.6331, 2.5666};

}



void SVDTimeGroupingModule::beginRun()
{
  if (m_forceGroupingFromDB) {
    if (!m_recoConfig.isValid())
      B2FATAL("no valid configuration found for SVD reconstruction");
    else
      B2DEBUG(20, "SVDRecoConfiguration: from now on we are using " << m_recoConfig->get_uniqueID());

    m_isEnabledIn6Samples = m_recoConfig->isSVDTimeGroupingEnabled(6);
    m_isEnabledIn3Samples = m_recoConfig->isSVDTimeGroupingEnabled(3);
  }

  if (m_isEnabledIn6Samples)
    B2INFO("SVDTimeGrouping : SVDCluster groupId is assigned for 6-sample DAQ mode.");
  else
    B2INFO("SVDTimeGrouping : SVDCluster groupId is not assigned for 6-sample DAQ mode.");

  if (m_isEnabledIn3Samples)
    B2INFO("SVDTimeGrouping : SVDCluster groupId is assigned for 3-sample DAQ mode.");
  else
    B2INFO("SVDTimeGrouping : SVDCluster groupId is not assigned for 3-sample DAQ mode.");

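  // if requested, overwrite the grouping parameters with the values stored in the
  // SVDTimeGroupingConfiguration payload; the time-reconstruction algorithm names taken
  // from SVDRecoConfiguration select which parameter set is used for 6- and 3-sample DAQ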
  if (m_useParamFromDB &&
      (m_isEnabledIn6Samples || m_isEnabledIn3Samples)) {

    if (!m_recoConfig.isValid())
      B2FATAL("no valid configuration found for SVD reconstruction");
    else
      B2DEBUG(20, "SVDRecoConfiguration: from now on we are using " << m_recoConfig->get_uniqueID());

    TString timeRecoWith6SamplesAlgorithm = m_recoConfig->getTimeRecoWith6Samples();
    TString timeRecoWith3SamplesAlgorithm = m_recoConfig->getTimeRecoWith3Samples();

    if (!m_groupingConfig.isValid())
      B2FATAL("no valid configuration found for SVDTimeGrouping");
    else
      B2DEBUG(20, "SVDTimeGroupingConfiguration: from now on we are using " << m_groupingConfig->get_uniqueID());

    m_usedParsIn6Samples = m_groupingConfig->getTimeGroupingParameters(timeRecoWith6SamplesAlgorithm, 6);
    m_usedParsIn3Samples = m_groupingConfig->getTimeGroupingParameters(timeRecoWith3SamplesAlgorithm, 3);
  }
}



void SVDTimeGroupingModule::initialize()
{
  // prepare all store:
  m_svdClusters.isRequired(m_svdClustersName);

  B2DEBUG(20, "SVDTimeGroupingModule \nsvdClusters: " << m_svdClusters.getName());
}



void SVDTimeGroupingModule::event()
{
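  // skip events that do not contain a minimal number of clusters (hard-coded cut)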
  if (int(m_svdClusters.getEntries()) < 10) return;


  // first take Event Information:
  StoreObjPtr<SVDEventInfo> temp_eventinfo(m_svdEventInfoName);
  if (!temp_eventinfo.isValid())
    m_svdEventInfoName = "SVDEventInfoSim";
  StoreObjPtr<SVDEventInfo> eventinfo(m_svdEventInfoName);
  if (!eventinfo) B2ERROR("No SVDEventInfo!");
  int numberOfAcquiredSamples = eventinfo->getNSamples();

  // then use the respective parameters
  if (numberOfAcquiredSamples == 6) {
    if (!m_isEnabledIn6Samples) return;
    m_usedPars = m_usedParsIn6Samples;
  } else if (numberOfAcquiredSamples == 3) {
    if (!m_isEnabledIn3Samples) return;
    m_usedPars = m_usedParsIn3Samples;
  }
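  // from here on m_usedPars holds the parameter set matching the DAQ mode of this event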

  // declare and fill the histogram shaping each cluster with a normalised gaussian
  // G(cluster time, resolution)
  TH1D h_clsTime;
  createAndFillHistorgram(h_clsTime);



  // now we search for peaks and when we find one we remove it from the distribution, one by one.

  std::vector<GroupInfo> groupInfoVector; // Gauss parameters (integral, center, sigma)

  // performing the search
  searchGausPeaksInHistogram(h_clsTime, groupInfoVector);
  // resize to max
  resizeToMaxSize(groupInfoVector);
  // sorting background groups
  sortBackgroundGroups(groupInfoVector);
  // sorting signal groups
  sortSignalGroups(groupInfoVector);

  // assign the groupID to clusters
  assignGroupIdsToClusters(h_clsTime, groupInfoVector);

} // end of event





void SVDTimeGroupingModule::createAndFillHistorgram(TH1D& hist)
{

  // minimise the range of the histogram removing empty bins at the edge
  // to speed up the execution time.

  int totClusters = m_svdClusters.getEntries();

  double tmpRange[2] = {std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN()};
  for (int ij = 0; ij < totClusters; ij++) {
    double clsTime = m_svdClusters[ij]->getClsTime();
    if (std::isnan(tmpRange[0]) || clsTime > tmpRange[0]) tmpRange[0] = clsTime;
    if (std::isnan(tmpRange[1]) || clsTime < tmpRange[1]) tmpRange[1] = clsTime;
  }
  double tRangeHigh = m_usedPars.tRange[1];
  double tRangeLow  = m_usedPars.tRange[0];
  if (tRangeHigh > tmpRange[0]) tRangeHigh = tmpRange[0];
  if (tRangeLow < tmpRange[1]) tRangeLow = tmpRange[1];

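  // the number of bins follows from the (possibly clipped) range and the rebinning
  // factor, i.e. the bin width is 1/rebinningFactor ns (see the parameter description)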
  int nBin = tRangeHigh - tRangeLow;
  if (nBin < 1) nBin = 1;
  nBin *= m_usedPars.rebinningFactor;
  if (nBin < 2) nBin = 2;
  B2DEBUG(21, "tRange: [" << tRangeLow << "," << tRangeHigh << "], nBin: " << nBin);

  hist = TH1D("h_clsTime", "h_clsTime", nBin, tRangeLow, tRangeHigh);
  hist.GetXaxis()->SetLimits(tRangeLow, tRangeHigh);

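  // each cluster is added as a normalised Gaussian: centre = cluster time,
  // width = cluster-size dependent resolution, truncated at fillSigmaN sigma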
  for (int ij = 0; ij < totClusters; ij++) {
    double clsSize = m_svdClusters[ij]->getSize();
    bool isUcls = m_svdClusters[ij]->isUCluster();
    int sType = getSensorType(m_svdClusters[ij]->getSensorID());
    double gSigma = (clsSize >= int(m_usedPars.clsSigma[sType][isUcls].size()) ?
                     m_usedPars.clsSigma[sType][isUcls].back() :
                     m_usedPars.clsSigma[sType][isUcls][clsSize - 1]);
    double gCenter = m_svdClusters[ij]->getClsTime();

    // adding/filling a gauss to histogram
    addGausToHistogram(hist, 1., gCenter, gSigma, m_usedPars.fillSigmaN);
  }

} // end of createAndFillHistorgram


void SVDTimeGroupingModule::searchGausPeaksInHistogram(TH1D& hist, std::vector<GroupInfo>& groupInfoVector)
{

  double maxPeak = 0.;     // height of the highest peak in signal region [expectedSignalTimeMin, expectedSignalTimeMax]
  double maxIntegral = 0.; // integral of the highest peak in signal region [expectedSignalTimeMin, expectedSignalTimeMax]

  bool amDone = false;
  int roughCleaningCounter = 0; // counter used when the fit does not converge
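  // iterative peak hunting: find the highest bin, fit a Gaussian around it, store the fitted
  // group and subtract it from the histogram; stop when the height or fitted integral of the
  // next peak falls below fracThreshold of the first signal-region peak, when maxGroups is
  // reached, or when too many non-converging fits have been cleaned away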
  while (!amDone) {

    // take the bin corresponding to the highest peak
    int maxBin = hist.GetMaximumBin();
    double maxBinCenter = hist.GetBinCenter(maxBin);
    double maxBinContent = hist.GetBinContent(maxBin);

    // Set maxPeak for the first time
    if (maxPeak == 0 &&
        maxBinCenter > m_usedPars.expectedSignalTime[0] && maxBinCenter < m_usedPars.expectedSignalTime[2])
      maxPeak = maxBinContent;
    // we are done if the height of this peak is below threshold
    if (maxPeak != 0 && maxBinContent < maxPeak * m_usedPars.fracThreshold) { amDone = true; continue;}



    // preparing the gaus function for fitting the peak
    TF1 ngaus("ngaus", myGaus,
              hist.GetXaxis()->GetXmin(), hist.GetXaxis()->GetXmax(), 3);

    // setting the parameters according to the maxBinCenter and maxBinContent
    double maxPar0 = maxBinContent * 2.50662827 * m_usedPars.fitRangeHalfWidth; // sqrt(2*pi) = 2.50662827
    ngaus.SetParameter(0, maxBinContent);
    ngaus.SetParLimits(0,
                       maxPar0 * 0.01,
                       maxPar0 * 2.);
    ngaus.SetParameter(1, maxBinCenter);
    ngaus.SetParLimits(1,
                       maxBinCenter - m_usedPars.fitRangeHalfWidth * 0.2,
                       maxBinCenter + m_usedPars.fitRangeHalfWidth * 0.2);
    ngaus.SetParameter(2, m_usedPars.fitRangeHalfWidth);
    ngaus.SetParLimits(2,
                       m_usedPars.limitSigma[0],
                       m_usedPars.limitSigma[1]);


    // fitting the gauss at the peak in the range [-fitRangeHalfWidth, fitRangeHalfWidth]
    int status = hist.Fit("ngaus", "NQ0", "",
                          maxBinCenter - m_usedPars.fitRangeHalfWidth,
                          maxBinCenter + m_usedPars.fitRangeHalfWidth);


    if (!status) { // if fit converges

      double pars[3] = {
        ngaus.GetParameter(0),            // integral
        ngaus.GetParameter(1),            // center
        std::fabs(ngaus.GetParameter(2))  // sigma
      };

      // fit converges but parameters are at limit
      // Do a rough cleaning
      if (pars[2] <= m_usedPars.limitSigma[0] + 0.01 || pars[2] >= m_usedPars.limitSigma[1] - 0.01) {
        // subtract the faulty part from the histogram
        subtractGausFromHistogram(hist, maxPar0, maxBinCenter, m_usedPars.fitRangeHalfWidth, m_usedPars.removeSigmaN);
        if (roughCleaningCounter++ > m_usedPars.maxGroups) amDone = true;
        continue;
      }

      // Set maxIntegral for the first time
      if (maxPeak != 0 && maxIntegral == 0) maxIntegral = pars[0];
      // we are done if the integral of this peak is below threshold
      if (maxIntegral != 0 && pars[0] < maxIntegral * m_usedPars.fracThreshold) { amDone = true; continue;}


      // now subtract the fitted gaussian from the histogram
      subtractGausFromHistogram(hist, pars[0], pars[1], pars[2], m_usedPars.removeSigmaN);

      // store group information (integral, position, width)
      groupInfoVector.push_back(GroupInfo(pars[0], pars[1], pars[2]));
      B2DEBUG(21, " group " << int(groupInfoVector.size())
              << " pars[0] " << pars[0] << " pars[1] " << pars[1] << " pars[2] " << pars[2]);

      if (int(groupInfoVector.size()) >= m_usedPars.maxGroups) { amDone = true; continue;}

    } else { // fit did not converge
      // subtract the faulty part from the histogram
      subtractGausFromHistogram(hist, maxPar0, maxBinCenter, m_usedPars.fitRangeHalfWidth, m_usedPars.removeSigmaN);
      if (roughCleaningCounter++ > m_usedPars.maxGroups) amDone = true;
      continue;
    }
  }

} // end of searchGausPeaksInHistogram



void SVDTimeGroupingModule::sortBackgroundGroups(std::vector<GroupInfo>& groupInfoVector)
{
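  // move background groups (centre outside the expected signal-time window) towards
  // the end of the vector, so that signal candidates stay at the front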
  GroupInfo keyGroup;
  for (int ij = int(groupInfoVector.size()) - 2; ij >= 0; ij--) {
    keyGroup = groupInfoVector[ij];
    double keyGroupIntegral = std::get<0>(keyGroup);
    double keyGroupCenter = std::get<1>(keyGroup);
    bool isKeyGroupSignal = true;
    if (keyGroupIntegral != 0. &&
        (keyGroupCenter < m_usedPars.expectedSignalTime[0] || keyGroupCenter > m_usedPars.expectedSignalTime[2]))
      isKeyGroupSignal = false;
    if (isKeyGroupSignal) continue; // skip if signal

    int kj = ij + 1;
    while (kj < int(groupInfoVector.size())) {
      double otherGroupIntegral = std::get<0>(groupInfoVector[kj]);
      double otherGroupCenter = std::get<1>(groupInfoVector[kj]);
      bool isOtherGroupSignal = true;
      if (otherGroupIntegral != 0. &&
          (otherGroupCenter < m_usedPars.expectedSignalTime[0] || otherGroupCenter > m_usedPars.expectedSignalTime[2]))
        isOtherGroupSignal = false;
      if (!isOtherGroupSignal && (otherGroupIntegral > keyGroupIntegral)) break;
      groupInfoVector[kj - 1] = groupInfoVector[kj];
      kj++;
    }
    groupInfoVector[kj - 1] = keyGroup;
  }
}


void SVDTimeGroupingModule::sortSignalGroups(std::vector<GroupInfo>& groupInfoVector)
{
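  // insertion sort of the signal groups by their prominence, defined as the group integral
  // weighted with exp(-|centre - expectedSignalTimeCenter| / signalLifetime);
  // the most prominent group ends up first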
  if (m_usedPars.signalLifetime > 0.) {
    GroupInfo keyGroup;
    for (int ij = 1; ij < int(groupInfoVector.size()); ij++) {
      keyGroup = groupInfoVector[ij];
      double keyGroupIntegral = std::get<0>(keyGroup);
      if (keyGroupIntegral <= 0) break;
      double keyGroupCenter = std::get<1>(keyGroup);
      bool isKeyGroupSignal = true;
      if (keyGroupIntegral > 0 &&
          (keyGroupCenter < m_usedPars.expectedSignalTime[0] || keyGroupCenter > m_usedPars.expectedSignalTime[2]))
        isKeyGroupSignal = false;
      if (!isKeyGroupSignal) break; // skip the backgrounds

      double keyWt = keyGroupIntegral * TMath::Exp(-std::fabs(keyGroupCenter - m_usedPars.expectedSignalTime[1]) /
                                                   m_usedPars.signalLifetime);
      int kj = ij - 1;
      while (kj >= 0) {
        double otherGroupIntegral = std::get<0>(groupInfoVector[kj]);
        double otherGroupCenter = std::get<1>(groupInfoVector[kj]);
        double grWt = otherGroupIntegral * TMath::Exp(-std::fabs(otherGroupCenter - m_usedPars.expectedSignalTime[1]) /
                                                      m_usedPars.signalLifetime);
        if (grWt > keyWt) break;
        groupInfoVector[kj + 1] = groupInfoVector[kj];
        kj--;
      }
      groupInfoVector[kj + 1] = keyGroup;
    }
  }
}


void SVDTimeGroupingModule::assignGroupIdsToClusters(TH1D& hist, std::vector<GroupInfo>& groupInfoVector)
{
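  // groupId convention: the index of the group in the sorted groupInfoVector (starting at 0),
  // maxGroups+1 for underflow and maxGroups+2 for overflow clusters (when
  // includeOutOfRangeClusters is enabled), and -1 for clusters not attached to any group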
  int totClusters = m_svdClusters.getEntries();
  double tRangeLow  = hist.GetXaxis()->GetXmin();
  double tRangeHigh = hist.GetXaxis()->GetXmax();

  // assign all clusters groupId = -1 if no groups are found
  if (int(groupInfoVector.size()) == 0)
    for (int jk = 0; jk < totClusters; jk++)
      m_svdClusters[jk]->setTimeGroupId().push_back(-1);

  // loop over all the groups
  // some groups may be dummy, ie, (0,0,0). they are skipped
  for (int ij = 0; ij < int(groupInfoVector.size()); ij++) {

    double pars[3] = {
      std::get<0>(groupInfoVector[ij]),
      std::get<1>(groupInfoVector[ij]),
      std::get<2>(groupInfoVector[ij])
    };

    if (pars[2] == 0 && ij != int(groupInfoVector.size()) - 1) continue;
    // do not skip the last iteration:
    // the group Id is assigned to leftover clusters in the last iteration.

    // for this group, accept the clusters falling within acceptSigmaN (default 7) sigma of the group center
    double lowestAcceptedTime  = pars[1] - m_usedPars.acceptSigmaN * pars[2];
    double highestAcceptedTime = pars[1] + m_usedPars.acceptSigmaN * pars[2];
    if (lowestAcceptedTime < tRangeLow)   lowestAcceptedTime = tRangeLow;
    if (highestAcceptedTime > tRangeHigh) highestAcceptedTime = tRangeHigh;
    B2DEBUG(21, " group " << ij
            << " lowestAcceptedTime " << lowestAcceptedTime
            << " highestAcceptedTime " << highestAcceptedTime);

    // now loop over all the clusters to check which clusters fall in this range
    for (int jk = 0; jk < totClusters; jk++) {
      double clsTime = m_svdClusters[jk]->getClsTime();

      if (pars[2] != 0 && // if the last group is dummy, we go straight to the leftover clusters
          clsTime >= lowestAcceptedTime && clsTime <= highestAcceptedTime) {

        // assigning groupId starting from 0
        m_svdClusters[jk]->setTimeGroupId().push_back(ij);

        // writing group info to clusters;
        // this is done independently of the group id
        if (m_usedPars.writeGroupInfo)
          m_svdClusters[jk]->setTimeGroupInfo().push_back(GroupInfo(pars[0], pars[1], pars[2]));

        B2DEBUG(29, " accepted cluster " << jk
                << " clsTime " << clsTime
                << " GroupId " << m_svdClusters[jk]->getTimeGroupId().back());

      } else {

        B2DEBUG(29, " rejected cluster " << jk
                << " clsTime " << clsTime);

        if (ij == int(groupInfoVector.size()) - 1 && // we are now at the last loop
            int(m_svdClusters[jk]->getTimeGroupId().size()) == 0) { // leftover clusters

          if (m_usedPars.includeOutOfRangeClusters && clsTime < tRangeLow)
            m_svdClusters[jk]->setTimeGroupId().push_back(m_usedPars.maxGroups + 1); // underflow
          else if (m_usedPars.includeOutOfRangeClusters && clsTime > tRangeHigh)
            m_svdClusters[jk]->setTimeGroupId().push_back(m_usedPars.maxGroups + 2); // overflow
          else
            m_svdClusters[jk]->setTimeGroupId().push_back(-1); // orphan


          B2DEBUG(29, " leftover cluster " << jk
                  << " GroupId " << m_svdClusters[jk]->getTimeGroupId().back());

        }
      }
    } // end of loop over all clusters
  } // end of loop over groups

}