class myModel(torch.nn.Module):
    """
    My dense neural network
    """

    def __init__(self, number_of_features):
        """
        Init the network

        param: number_of_features number of input variables
        """
        super().__init__()

        #: a dense model with one hidden layer
        # NOTE(review): reconstructed from a garbled source — the three Linear
        # layers (features->128, 128->128, 128->1) are visible; the ReLU hidden
        # activations and Sigmoid output (required by the BCELoss used in
        # get_model) are inferred — confirm against the original file.
        self.network = torch.nn.Sequential(
            torch.nn.Linear(number_of_features, 128),
            torch.nn.ReLU(),
            torch.nn.Linear(128, 128),
            torch.nn.ReLU(),
            torch.nn.Linear(128, 1),
            torch.nn.Sigmoid(),
        )

    def forward(self, x):
        """
        Run the network on a batch of feature vectors.

        param: x tensor of shape (batch, number_of_features)
        returns: tensor of shape (batch, 1) with values in (0, 1)
        """
        return self.network(x)
def get_model(number_of_features, number_of_spectators, number_of_events, training_fraction, parameters):
    """
    Returns default torch model

    param: number_of_features number of input variables
    param: number_of_spectators number of spectator variables (unused here)
    param: number_of_events total number of training events (unused here)
    param: training_fraction fraction of events used for training (unused here)
    param: parameters dict of hyper-parameters; only 'learning_rate' is read
    returns: State wrapping the model, optimizer and loss-function class
    """
    state = State(myModel(number_of_features).to("cpu"),
                  number_of_features=number_of_features)

    # guard against a missing config so parameters.get below is safe
    if parameters is None:
        parameters = {}

    state.optimizer = torch.optim.SGD(state.model.parameters(),
                                      parameters.get('learning_rate', 1e-2))

    # the loss is stored as the CLASS, not an instance — presumably so a fresh
    # BCELoss can be constructed per batch (e.g. with per-event weights); confirm
    state.loss_fn = torch.nn.BCELoss

    return state
if __name__ == "__main__":
    from basf2 import conditions
    # NOTE: do not use testing payloads in production!
    conditions.testing_payloads = [
        'localdb/database.txt'
    ]

    # General options: input data, output identifier, tree and variables
    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = basf2_mva.vector("train.root")
    general_options.m_identifier = "Simple"
    general_options.m_treename = "tree"
    variables = ['M', 'p', 'pt', 'pz',
                 'daughter(0, p)', 'daughter(0, pz)', 'daughter(0, pt)',
                 'daughter(1, p)', 'daughter(1, pz)', 'daughter(1, pt)',
                 'daughter(2, p)', 'daughter(2, pz)', 'daughter(2, pt)',
                 'chiProb', 'dr', 'dz',
                 'daughter(0, dr)', 'daughter(1, dr)',
                 'daughter(0, dz)', 'daughter(1, dz)',
                 'daughter(0, chiProb)', 'daughter(1, chiProb)', 'daughter(2, chiProb)',
                 'daughter(0, kaonID)', 'daughter(0, pionID)',
                 'daughterInvM(0, 1)', 'daughterInvM(0, 2)', 'daughterInvM(1, 2)']
    general_options.m_variables = basf2_mva.vector(*variables)
    general_options.m_target_variable = "isSignal"

    # Python/torch-specific options: this very file is the steering file
    specific_options = basf2_mva.PythonOptions()
    specific_options.m_framework = "torch"
    specific_options.m_steering_file = 'mva/examples/torch/simple.py'
    specific_options.m_nIterations = 64
    specific_options.m_mini_batch_size = 256
    specific_options.m_config = json.dumps({'learning_rate': 1e-2})
    specific_options.m_training_fraction = 0.8
    specific_options.m_normalise = False

    # Train and time the training
    training_start = time.time()
    basf2_mva.teacher(general_options, specific_options)
    training_stop = time.time()
    training_time = training_stop - training_start

    # NOTE(review): `method` was never constructed in the visible fragments;
    # presumably it is loaded from the trained identifier as below — confirm.
    method = basf2_mva_util.Method(general_options.m_identifier)

    # Apply the trained expert to the test sample and time the inference
    inference_start = time.time()
    test_data = ["test.root"]
    p, t = method.apply_expert(basf2_mva.vector(*test_data), general_options.m_treename)
    inference_stop = time.time()
    inference_time = inference_stop - inference_start

    # Fix: the garbled source printed `auc` before it was computed/assigned;
    # compute it first, then report timing and quality.
    auc = basf2_mva_util.calculate_auc_efficiency_vs_background_retention(p, t, w=None)
    print("Torch", training_time, inference_time, auc)
# NOTE(review): the three lines below are fragments displaced from the
# myModel class above (its attribute comment and __init__ signature),
# preserved here verbatim as comments:
#   network
#   a dense model with one hidden layer
#   __init__(self, number_of_features)