from tensorflow.keras.activations import sigmoid, tanh
from tensorflow.keras.callbacks import Callback
from tensorflow.keras.layers import Input, Dense, Dropout, BatchNormalization
from tensorflow.keras.losses import binary_crossentropy
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
# Wall-clock timestamp taken when the module is loaded.
# NOTE(review): not referenced anywhere in this chunk — presumably used
# (or left over) elsewhere in the file; confirm before removing.
old_time = time.time()
def get_model(number_of_features, number_of_spectators, number_of_events, training_fraction, parameters):
    """
    Build a feed-forward keras model for binary classification.

    The signature is fixed by the basf2 mva keras backend: it receives the
    dataset geometry and the user parameters and must return a State
    wrapping the compiled model.

    :param number_of_features: number of input features (also used as the
        width of every hidden layer)
    :param number_of_spectators: unused here
    :param number_of_events: unused here
    :param training_fraction: unused here
    :param parameters: unused here
    :return: State object wrapping the compiled keras Model
    """
    # Renamed from `input` to avoid shadowing the builtin.
    inputs = Input(shape=(number_of_features,))

    net = Dense(units=number_of_features, activation=tanh)(inputs)

    net = Dense(units=number_of_features, activation=tanh)(net)
    net = BatchNormalization()(net)

    net = Dense(units=number_of_features, activation=tanh)(net)
    # Dropout on the last hidden layer for regularization.
    net = Dropout(rate=0.4)(net)

    # Single sigmoid unit: signal probability for binary classification.
    output = Dense(units=1, activation=sigmoid)(net)

    state = State(Model(inputs, output))

    # NOTE(review): `lr` is the legacy keyword (renamed `learning_rate` in
    # newer TF releases); kept as-is to match this file's TF version.
    state.model.compile(optimizer=Adam(lr=0.01), loss=binary_crossentropy, metrics=['accuracy'])

    # Bug fix: the framework expects the state back; without this return
    # the caller received None.
    return state
def begin_fit(state, Xtest, Stest, ytest, wtest, nBatches):
    """
    Store the test sample on the state object and return it.

    The extracted source had lost the function body; it is restored here.
    `partial_fit`'s monitoring callback reads `state.Xtest` / `state.ytest`,
    so they must be attached here.

    :param state: State object from get_model
    :param Xtest: test-sample features
    :param Stest: test-sample spectators (unused)
    :param ytest: test-sample targets
    :param wtest: test-sample weights (unused)
    :param nBatches: number of batches (unused)
    :return: the (augmented) state object
    """
    state.Xtest = Xtest
    state.ytest = ytest
    return state
def partial_fit(state, X, S, y, w, epoch, batch):
    """
    Pass the received data to tensorflow and run the full training.

    All 10 keras epochs are executed inside the single fit() call below,
    so this returns False to signal the framework that no further
    partial_fit calls are needed.

    :param state: State object (carries .model, .Xtest, .ytest)
    :param X: training features
    :param S: training spectators (unused)
    :param y: training targets
    :param w: training weights (unused)
    :param epoch: current epoch index (unused)
    :param batch: current batch index (unused)
    :return: False — stop receiving data
    """
    class TestCallback(Callback):
        """Print test and training loss/accuracy after every keras epoch."""

        def on_epoch_end(self, epoch, logs=None):
            loss, acc = state.model.evaluate(state.Xtest, state.ytest, verbose=0, batch_size=1000)
            # Evaluate on (at most) the first 10k training events for a
            # cheap train-vs-test comparison.
            loss2, acc2 = state.model.evaluate(X[:10000], y[:10000], verbose=0, batch_size=1000)
            print('\nTesting loss: {}, acc: {}'.format(loss, acc))
            print('Training loss: {}, acc: {}'.format(loss2, acc2))

    state.model.fit(X, y, batch_size=500, epochs=10, callbacks=[TestCallback()])
    # Bug fix (restored): the basf2 mva interface interprets the return
    # value as "continue feeding data"; training is complete, so stop.
    return False
if __name__ == "__main__":
    from basf2 import conditions
    # NOTE(review): uses a local test payload database; confirm
    # 'localdb/database.txt' exists relative to the working directory.
    conditions.testing_payloads = [
        'localdb/database.txt'
    ]

    # --- teacher (training) configuration --------------------------------
    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = basf2_mva.vector("train.root")
    general_options.m_identifier = "deep_keras"
    general_options.m_treename = "tree"
    variables = ['M', 'p', 'pt', 'pz',
                 'daughter(0, p)', 'daughter(0, pz)', 'daughter(0, pt)',
                 'daughter(1, p)', 'daughter(1, pz)', 'daughter(1, pt)',
                 'daughter(2, p)', 'daughter(2, pz)', 'daughter(2, pt)',
                 'chiProb', 'dr', 'dz',
                 'daughter(0, dr)', 'daughter(1, dr)',
                 'daughter(0, dz)', 'daughter(1, dz)',
                 'daughter(0, chiProb)', 'daughter(1, chiProb)', 'daughter(2, chiProb)',
                 'daughter(0, kaonID)', 'daughter(0, pionID)',
                 'daughterInvM(0, 1)', 'daughterInvM(0, 2)', 'daughterInvM(1, 2)']
    general_options.m_variables = basf2_mva.vector(*variables)
    general_options.m_target_variable = "isSignal"

    specific_options = basf2_mva.PythonOptions()
    specific_options.m_framework = "keras"
    specific_options.m_steering_file = 'mva/examples/keras/simple_deep.py'
    specific_options.m_normalize = True
    specific_options.m_training_fraction = 0.9

    # Train and time the teacher.
    training_start = time.time()
    basf2_mva.teacher(general_options, specific_options)
    training_stop = time.time()
    training_time = training_stop - training_start

    # NOTE(review): the extracted source used `method` without constructing
    # it; restored from the upstream basf2 example — confirm basf2_mva_util
    # is imported in the file header (above this chunk).
    method = basf2_mva_util.Method(general_options.m_identifier)

    # Apply the trained expert on ten copies of the test file and time it.
    inference_start = time.time()
    test_data = ["test.root"] * 10
    p, t = method.apply_expert(basf2_mva.vector(*test_data), general_options.m_treename)
    inference_stop = time.time()
    inference_time = inference_stop - inference_start

    # NOTE(review): `auc` was printed but never computed in the extracted
    # source; restored from the upstream basf2 example.
    auc = basf2_mva_util.calculate_auc_efficiency_vs_background_retention(p, t)

    print("Tensorflow.keras", training_time, inference_time, auc)
def calculate_auc_efficiency_vs_background_retention(p, t, w=None)