import os
import time
from tempfile import TemporaryDirectory

import numpy as np
import torch
from torch import nn
import uproot

import basf2_mva
import ROOT


class Model(nn.Module):
    """
    My dense neural network
    """

    def __init__(self, number_of_features):
        """
        Parameters:
            number_of_features: number of input features
        """
        super().__init__()
        #: a dense model with one hidden layer
        self.network = nn.Sequential(
            nn.Linear(number_of_features, 128),
            nn.ReLU(),
            nn.Linear(128, 1),
            nn.Sigmoid(),
        )

    def forward(self, x):
        return self.network(x)


def fit(model, filename, treename, variables, target_variable):
    """
    Train the model on the ntuple stored in the given file and tree
    """
    with uproot.open({filename: treename}) as tree:
        # read the input branches (in their ROOT-compatible form) into a
        # (n_events, n_features) array and the target into a 1D array
        branch_names = list(
            map(ROOT.Belle2.MakeROOTCompatible.makeROOTCompatible, variables),
        )
        X = np.stack([tree[name].array(library="np") for name in branch_names], axis=1)
        y = tree[target_variable].array(library="np")
    ds = torch.utils.data.TensorDataset(
        torch.tensor(X, dtype=torch.float32),
        torch.tensor(y, dtype=torch.float32)[:, np.newaxis],
    )
    dl = torch.utils.data.DataLoader(ds, batch_size=256, shuffle=True)
    opt = torch.optim.Adam(model.parameters())
    for epoch in range(50):
        print(f"Epoch {epoch}", end=", ")
        losses = []
        for bx, by in dl:
            p = model(bx)
            loss = torch.nn.functional.binary_cross_entropy(p, by)
            opt.zero_grad()
            loss.backward()
            opt.step()
            losses.append(loss.detach().item())
            print(f"Loss = {np.mean(losses)}", end="\r")
        print()


def save_onnx_to_database(model, general_options, specific_options, identifier):
    """
    Export the model to ONNX and save it as an MVA weightfile in the database
    """
    with TemporaryDirectory() as tempdir:
        model_path = os.path.join(tempdir, "model.onnx")
        print("convert to onnx")
        torch.onnx.export(
            model,
            (torch.randn(1, len(general_options.m_variables)),),
            model_path,
            input_names=["input"],
            output_names=["output"],
        )
        wf = ROOT.Belle2.MVA.Weightfile()
        general_options.m_method = specific_options.getMethod()
        wf.addOptions(general_options)
        wf.addOptions(specific_options)
        wf.addFile("ONNX_Modelfile", model_path)
        print("save to database")
        ROOT.Belle2.MVA.Weightfile.saveToDatabase(wf, identifier)
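

# NOTE: the helper below is an illustrative addition and not part of the original
# example. It sketches how the exported ONNX file could be cross-checked against
# the PyTorch model with onnxruntime (assumed to be installed) before the
# weightfile is uploaded; the function name and tolerances are arbitrary choices.
def check_onnx_export(model, onnx_path, number_of_features):
    """Compare onnxruntime output with the PyTorch output on random inputs."""
    import onnxruntime as ort

    x = torch.randn(8, number_of_features)
    session = ort.InferenceSession(onnx_path)
    onnx_out = session.run(None, {"input": x.numpy()})[0]
    with torch.no_grad():
        torch_out = model(x).numpy()
    np.testing.assert_allclose(onnx_out, torch_out, rtol=1e-4, atol=1e-5)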


if __name__ == "__main__":
    import basf2_mva_util
    from basf2 import conditions
    from basf2 import find_file

    # read weightfile payloads from the local test database
    conditions.testing_payloads = [
        'localdb/database.txt'
    ]

    train_file = find_file("mva/train_D0toKpipi.root", "examples")
    test_file = find_file("mva/test_D0toKpipi.root", "examples")
    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = basf2_mva.vector(train_file)
    general_options.m_identifier = "Simple"
    general_options.m_treename = "tree"
    variables = ['M', 'p', 'pt', 'pz',
                 'daughter(0, p)', 'daughter(0, pz)', 'daughter(0, pt)',
                 'daughter(1, p)', 'daughter(1, pz)', 'daughter(1, pt)',
                 'daughter(2, p)', 'daughter(2, pz)', 'daughter(2, pt)',
                 'chiProb', 'dr', 'dz',
                 'daughter(0, dr)', 'daughter(1, dr)',
                 'daughter(0, dz)', 'daughter(1, dz)',
                 'daughter(0, chiProb)', 'daughter(1, chiProb)', 'daughter(2, chiProb)',
                 'daughter(0, kaonID)', 'daughter(0, pionID)',
                 'daughterInvM(0, 1)', 'daughterInvM(0, 2)', 'daughterInvM(1, 2)']
    general_options.m_variables = basf2_mva.vector(*variables)
    general_options.m_target_variable = "isSignal"

    specific_options = basf2_mva.ONNXOptions()

    model = Model(len(variables))
    fit(
        model,
        train_file,
        general_options.m_treename,
        variables,
        general_options.m_target_variable,
    )
    save_onnx_to_database(model, general_options, specific_options, "ONNXTest")

    # load the stored weightfile back as an MVA method (identifier as saved above)
    method = basf2_mva_util.Method("ONNXTest")
    inference_start = time.time()
    test_data = [test_file]
    p, t = method.apply_expert(basf2_mva.vector(*test_data), general_options.m_treename)
    inference_stop = time.time()
    inference_time = inference_stop - inference_start
    auc = basf2_mva_util.calculate_auc_efficiency_vs_background_retention(p, t)
    print("ONNX", inference_time, auc)
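
    # NOTE: illustrative addition, not part of the original example. Assuming the
    # generic basf2_mva.expert() tool, the weightfile stored under "ONNXTest" could
    # also be applied to an ntuple directly, writing the network response for each
    # candidate into an output ROOT file (the output file name here is arbitrary):
    basf2_mva.expert(
        basf2_mva.vector("ONNXTest"),
        basf2_mva.vector(test_file),
        general_options.m_treename,
        "expert_output.root",
    )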