import numpy as np
import tensorflow as tf
import basf2_mva
import basf2_mva_util
import time

from basf2_mva_python_interface.tensorflow import State

# Reference time used by the timing printout in partial_fit below.
old_time = time.time()


def get_model(number_of_features, number_of_spectators, number_of_events, training_fraction, parameters):
    # Artificially inflate the input dimension by a factor of 20;
    # partial_fit below repeats each feature 20 times to match.
    number_of_features *= 20

    tf.reset_default_graph()
    x = tf.placeholder(tf.float32, [None, number_of_features])
    y = tf.placeholder(tf.float32, [None, 1])
    def layer(x, shape, name, unit=tf.sigmoid):
        with tf.name_scope(name):
            weights = tf.Variable(tf.truncated_normal(shape, stddev=1.0 / np.sqrt(float(shape[0]))), name='weights')
            biases = tf.Variable(tf.constant(0.0, shape=[shape[1]]), name='biases')
            # Collect the L2 weight decay of every layer so it can be added to the loss.
            weight_decay = tf.reduce_sum(0.001 * tf.nn.l2_loss(weights))
            tf.add_to_collection('losses', weight_decay)
            layer = unit(tf.matmul(x, weights) + biases)
        return layer

    inference_hidden1 = layer(x, [number_of_features, number_of_features], 'inference_hidden1')
    inference_hidden2 = layer(inference_hidden1, [number_of_features, number_of_features], 'inference_hidden2')
    inference_hidden3 = layer(inference_hidden2, [number_of_features, number_of_features], 'inference_hidden3')
    inference_hidden4 = layer(inference_hidden3, [number_of_features, number_of_features], 'inference_hidden4')
    inference_hidden5 = layer(inference_hidden4, [number_of_features, number_of_features], 'inference_hidden5')
    inference_hidden6 = layer(inference_hidden5, [number_of_features, number_of_features], 'inference_hidden6')
    inference_hidden7 = layer(inference_hidden6, [number_of_features, number_of_features], 'inference_hidden7')
    inference_hidden8 = layer(inference_hidden7, [number_of_features, number_of_features], 'inference_hidden8')
    inference_hidden9 = layer(inference_hidden8, [number_of_features, number_of_features], 'inference_hidden9')
    inference_hidden10 = layer(inference_hidden9, [number_of_features, number_of_features], 'inference_hidden10')
    inference_hidden11 = layer(inference_hidden10, [number_of_features, number_of_features], 'inference_hidden11')
    inference_hidden12 = layer(inference_hidden11, [number_of_features, number_of_features], 'inference_hidden12')
    inference_hidden13 = layer(inference_hidden12, [number_of_features, number_of_features], 'inference_hidden13')
    inference_hidden14 = layer(inference_hidden13, [number_of_features, number_of_features], 'inference_hidden14')
    inference_hidden15 = layer(inference_hidden14, [number_of_features, number_of_features], 'inference_hidden15')
    inference_activation = layer(inference_hidden15, [number_of_features, 1], 'inference_sigmoid', unit=tf.sigmoid)
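
    # The loss below is the binary cross entropy of the sigmoid output,
    # plus all L2 weight-decay terms that layer() collected in the
    # 'losses' collection.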
    # The epsilon guards against log(0); its value is an assumption, it is
    # not given in the original snippet.
    epsilon = 1e-5
    inference_loss = (-tf.reduce_sum(y * tf.log(inference_activation + epsilon) +
                      (1.0 - y) * tf.log(1 - inference_activation + epsilon)) +
                      tf.reduce_sum(tf.get_collection('losses')))

    inference_optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
    inference_minimize = inference_optimizer.minimize(inference_loss)

    init = tf.global_variables_initializer()

    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    session = tf.Session(config=config)
    session.run(init)

    state = State(x, y, inference_activation, inference_loss, inference_minimize, session)
    return state


def partial_fit(state, X, S, y, w, epoch):
    """
    Pass received data to tensorflow session in minibatches.
    """
    # The minibatch size is an assumption; it is not given in the original snippet.
    batch_size = 100
    indices = np.arange(len(X))
    np.random.shuffle(indices)
    for pos in range(0, len(indices), batch_size):
        # Skip the final, incomplete batch.
        if pos + batch_size >= len(indices):
            break
        index = indices[pos: pos + batch_size]
        # Repeat every feature 20 times to match the inflated input layer of get_model.
        x_batch = np.repeat(X[index], 20, axis=1)
        y_batch = y[index]
        feed_dict = {state.x: x_batch, state.y: y_batch}
        state.session.run(state.optimizer, feed_dict=feed_dict)

    avg_cost = state.session.run(state.cost, feed_dict=feed_dict)
    new_time = time.time()
    print("Time Difference", new_time - old_time)
    print("Epoch:", '%04d' % (epoch), "cost=", "{:.9f}".format(avg_cost))
    # Returning True tells basf2 to continue with the next epoch.
    return True


if __name__ == "__main__":
    from basf2 import conditions
    # Use testing payloads from a local database for this example.
    conditions.testing_payloads = [
        'localdb/database.txt'
    ]

    general_options = basf2_mva.GeneralOptions()
    general_options.m_datafiles = basf2_mva.vector("train.root")
    general_options.m_identifier = "Tensorflow"
    general_options.m_treename = "tree"
    variables = ['M', 'p', 'pt', 'pz',
                 'daughter(0, p)', 'daughter(0, pz)', 'daughter(0, pt)',
                 'daughter(1, p)', 'daughter(1, pz)', 'daughter(1, pt)',
                 'daughter(2, p)', 'daughter(2, pz)', 'daughter(2, pt)',
                 'chiProb', 'dr', 'dz',
                 'daughter(0, dr)', 'daughter(1, dr)',
                 'daughter(0, dz)', 'daughter(1, dz)',
                 'daughter(0, chiProb)', 'daughter(1, chiProb)', 'daughter(2, chiProb)',
                 'daughter(0, kaonID)', 'daughter(0, pionID)',
                 'daughterInvariantMass(0, 1)', 'daughterInvariantMass(0, 2)', 'daughterInvariantMass(1, 2)']
    general_options.m_variables = basf2_mva.vector(*variables)
    general_options.m_target_variable = "isSignal"

    specific_options = basf2_mva.PythonOptions()
    specific_options.m_framework = "tensorflow"
    specific_options.m_steering_file = 'mva/examples/tensorflow/simple_deep.py'
    specific_options.m_normalize = True

    training_start = time.time()
    basf2_mva.teacher(general_options, specific_options)
    training_stop = time.time()
    training_time = training_stop - training_start
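
    # The Method wrapper below is an assumption: the original snippet calls
    # method.apply_expert() without constructing `method`, so a plausible
    # construction via basf2_mva_util is sketched here.
    method = basf2_mva_util.Method(general_options.m_identifier)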

    inference_start = time.time()
    test_data = ["test.root"] * 10
    p, t = method.apply_expert(basf2_mva.vector(*test_data), general_options.m_treename)
    inference_stop = time.time()
    inference_time = inference_stop - inference_start

    # Area under the ROC curve of the network output on the test data.
    auc = basf2_mva_util.calculate_roc_auc(p, t)
    print("Tensorflow", training_time, inference_time, auc)