9#include <svd/reconstruction/NNWaveFitter.h>
14#include <boost/property_tree/ptree.hpp>
15#include <boost/property_tree/xml_parser.hpp>
16#include <framework/logging/Logger.h>
38 using namespace boost::property_tree;
39 using boost::property_tree::ptree;
44 B2DEBUG(400,
"Reading xml");
47 read_xml(ss, propertyTree);
48 }
catch (
const ptree_error& e) {
49 B2ERROR(
"Failed to parse xml data: " << e.what());
50 }
catch (std::exception
const& ex) {
51 B2ERROR(
"STD excpetion " << ex.what() <<
" in parsing.");
57 string function_name = propertyTree.get<
string>(
"PMML.NeuralNetwork.<xmlattr>.functionName");
58 if (function_name !=
"classification") {
59 B2ERROR(
"This is incorrect network, expected multiclass classifier.");
62 B2DEBUG(400,
"Claasifier confirmed.");
64 string activationFunction = propertyTree.get<
string>(
"PMML.NeuralNetwork.<xmlattr>.activationFunction");
65 if (activationFunction !=
"rectifier") {
66 B2ERROR(
"Expected rectifier (relu) activation, found " << activationFunction <<
"instead.");
69 B2DEBUG(400,
"Activation confirmed.");
71 string waveFormType = propertyTree.get<
string>(
"PMML.MiningBuildTask.Training.Waveform");
72 B2DEBUG(400,
"Waveform set to " << waveFormType);
73 }
catch (
const ptree_error& e) {
74 B2ERROR(
"PropertyTree excpetion : " << e.what() <<
" in network check.");
80 string pathString =
"PMML.MiningBuildTask.NetworkParameters";
81 map<size_t, size_t> m_layer_sizes_map;
82 for (
const auto& layer_tag : propertyTree.get_child(pathString)) {
83 if (layer_tag.first ==
"<xmlattr>")
continue;
84 size_t layer_no = layer_tag.second.get<
size_t>(
"<xmlattr>.number");
85 size_t layer_size = layer_tag.second.get<
size_t>(
"<xmlattr>.size");
86 m_layer_sizes_map.insert(make_pair(layer_no, layer_size));
92 for (
size_t iLayer = 0; iLayer <
m_nLayers; ++iLayer)
94 for (
size_t iLayer = 1; iLayer <
m_nLayers; ++iLayer) {
101 for (
size_t iLayer = 0; iLayer <
m_nLayers; ++iLayer)
103 B2DEBUG(400,
"Network topology read.");
104 }
catch (
const ptree_error& e) {
105 B2ERROR(
"PropertyTree excpetion : " << e.what() <<
" when reading network topology.");
111 string pathString(
"PMML.MiningBuildTask.Training");
112 for (
const auto& param_tag : propertyTree.get_child(pathString)) {
113 if (param_tag.first ==
"Parameter") {
114 string valueString(param_tag.second.get<
string>(
"<xmlattr>.value"));
115 double low = param_tag.second.get<
double>(
"<xmlattr>.low");
116 double high = param_tag.second.get<
double>(
"<xmlattr>.high");
117 if (valueString ==
"amplitude") {
120 }
else if (valueString ==
"t0") {
123 }
else if (valueString ==
"tau") {
133 B2DEBUG(400,
"Read parameter bounds.");
134 }
catch (
const ptree_error& e) {
135 B2ERROR(
"PropertyTree excpetion: " << e.what() <<
" when reading parameter bounds.");
141 string pathString(
"PMML.DataDictionary.DataField");
144 for (
const auto& value_tag : propertyTree.get_child(pathString)) {
145 if (value_tag.first ==
"Value") {
146 size_t i = value_tag.second.get<
size_t>(
"<xmlattr>.value");
148 m_bins[0] = value_tag.second.get<
double>(
"<xmlattr>.lower") *
Unit::ns;
149 m_bins[i] = value_tag.second.get<
double>(
"<xmlattr>.upper") *
Unit::ns;
154 B2DEBUG(400,
"Outputs done.");
155 }
catch (
const ptree_error& e) {
156 B2ERROR(
"PropertyTree excpetion: " << e.what() <<
" when reading bin data.");
162 string pathString(
"PMML.NeuralNetwork");
163 for (
const auto& nl_tag : propertyTree.get_child(pathString)) {
164 if (nl_tag.first !=
"NeuralLayer")
continue;
165 B2DEBUG(400,
"Reading neural layers " << nl_tag.first <<
" " << nl_tag.second.size());
166 for (
const auto& neuron_tag : nl_tag.second) {
167 if (neuron_tag.first !=
"Neuron")
continue;
168 double bias = neuron_tag.second.get<
double>(
"<xmlattr>.bias");
169 string sid = neuron_tag.second.get<
string>(
"<xmlattr>.id");
170 size_t layer = stoi(sid.substr(0, 1));
171 size_t pos = stoi(sid.substr(2, sid.size()));
172 B2DEBUG(400,
"Reading neurons " << layer <<
"/" << pos <<
" bias: " << bias);
174 for (
const auto& con_tag : neuron_tag.second) {
175 if (con_tag.first !=
"Con")
continue;
176 double weight = con_tag.second.get<
double>(
"<xmlattr>.weight");
177 string sid2 = con_tag.second.get<
string>(
"<xmlattr>.from");
178 size_t pos2 = stoi(sid2.substr(sid2.find(
'/') + 1, sid2.size()));
180 B2DEBUG(400,
"Reading connections " << sid2 <<
" weight: " << weight);
184 B2DEBUG(400,
"Neurons done.");
185 }
catch (
const ptree_error& e) {
186 B2ERROR(
"PropertyTree excpetion: " << e.what() <<
" when reading neurons.");
195 ifstream dump(dumpname);
204 for (
size_t iLayer = 1; iLayer <
m_nLayers; ++iLayer) {
210 for (
size_t iRow = 0; iRow <
m_layerSizes[iLayer]; ++iRow) {
212 istringstream iline(line);
213 for (
size_t iCol = 0; iCol <
m_layerSizes[iLayer - 1]; ++iCol) {
216 if (fabs(value -
m_networkCoefs[iLayer].first(iRow, iCol)) > tol) {
217 B2DEBUG(90,
"Mismatch in weights in layer: " << iLayer
218 <<
" row: " << iRow <<
" col: " << iCol
220 <<
" Python: " << value
232 istringstream iline(line);
233 for (
size_t iRow = 0; iRow <
m_layerSizes[iLayer]; ++iRow) {
237 B2DEBUG(90,
"Mismatch in intercepts in layer: " << iLayer
240 <<
" Python: " << value
279 copy(samples.begin(), samples.end(), ostream_iterator<double>(os,
" "));
282 B2DEBUG(100, os.str());
285 os <<
"Layer states: " << endl;
286 for (
size_t iLayer = 1; iLayer <
m_nLayers; ++iLayer) {
293 B2DEBUG(100, os.str());
300 for (
size_t i = 0; i < result->size(); ++i)
304 os <<
"Result: " << endl;
305 copy(result->begin(), result->end(), ostream_iterator<double>(os,
" "));
307 B2DEBUG(100, os.str());
NNWaveFitter(std::string xmlData="")
Constructor; constructs the wave fitter from data in an XML file.
std::size_t m_nLayers
number of NN layers, read from xml
void setNetwrok(const std::string &xmlData)
Set proper network definition file.
TauEncoder m_tauCoder
Tau encoder class instance to scale tau values.
layerStatesType m_layerStates
vectors of layer states
int readNetworkData(const std::string &xmlFileName)
The method that actually reads the xml file.
nnBoundsType m_waveWidthBounds
Waveform width range of the network.
activationType relu
Rectifier activation.
nnFitterBins m_bins
NN time bin boundaries.
std::vector< std::size_t > m_layerSizes
NN layer sizes.
networkWeightsType m_networkCoefs
NN weights and intercepts.
bool checkCoefficients(const std::string &dumpname, double tol=1.0e-10)
Check NN data against a dump from Python.
std::shared_ptr< NNWaveFitTool > m_fitTool
FitterTool object allowing calculations on network fits.
nnBoundsType m_amplitudeBounds
Amplitude range of the network.
nnFitterBinData m_binCenters
centers of NN time bins
bool m_isValid
true if fitter was properly initialized
Eigen::VectorXd softmax(const Eigen::VectorXd &input)
Softmax function, normalization for the network's output layer.
WaveformShape m_wave
Wave function used in training the network.
nnBoundsType m_timeShiftBounds
Time shift range of the network.
std::shared_ptr< nnFitterBinData > getFit(const apvSamples &samples, double tau)
Fitting method: send data and get the result structure.
void print(std::ostringstream &os) const
print tool
void setBounds(double min_amplitude, double max_amplitude, double min_tau, double max_tau)
Set encoder bounds (e.g.
double encodeTau(double tau) const
Return encoded value for a waveform width tau value.
Waveform generator: a functor that calculates APV samples from a waveform.
static const double ns
Standard of [time].
std::array< apvSampleBaseType, nAPVSamples > apvSamples
vector of apvSampleBaseType objects
double w_betaprime(double t)
Beta-prime waveform shape, x^alpha/(1+x)^beta.
std::vector< double > nnFitterBinData
Vector of values defined for bins, such as bin times or bin probabilities.
Abstract base class for different kinds of events.