# Belle II Software  release-08-01-10
# theano.py
1 #!/usr/bin/env python3
2 
3 
10 
11 try:
12  import theano
13  import theano.tensor
14 except ImportError:
15  print("Please install theano: pip3 install theano")
16  import sys
17  sys.exit(1)
18 
19 import numpy
20 
21 
class State:
    """
    State class for proper handling of parameters and data during function calls. This is a very brief theano example.
    """

    def __init__(self, x=None, y=None, params=None, cost=None, updates=None, train_function=None, eval_function=None):
        """
        Constructor of the State class

        @param x: theano input variable for the feature matrix
        @param y: theano input variable for the target vector
        @param params: list of theano shared variables (trainable weights and biases)
        @param cost: theano expression for the training cost
        @param updates: list of (parameter, update-expression) pairs for gradient descent
        @param train_function: compiled theano function used for training
        @param eval_function: compiled theano function used for evaluation
        """
        # TODO: make serializable with __getstate__(), __setstate__()

        #: theano input variable x
        self.x = x
        #: theano input variable y
        self.y = y
        #: theano shared variables (trainable parameters)
        self.params = params
        #: theano cost expression
        self.cost = cost
        #: gradient-descent update rules
        self.updates = updates
        #: theano function for training
        self.train_function = train_function
        #: theano function for evaluation
        self.eval_function = eval_function
50 
def get_model(number_of_features, number_of_spectators, number_of_events, training_fraction, parameters):
    """
    Build a simple one-layer logistic-regression model in theano and compile
    its training and evaluation functions.

    @param number_of_features: number of input features (input dimension)
    @param number_of_spectators: unused here, kept for interface compatibility
    @param number_of_events: unused here, kept for interface compatibility
    @param training_fraction: unused here, kept for interface compatibility
    @param parameters: dict of user parameters; 'learning_rate' is honoured (default 0.1)
    @return: State object holding the symbolic graph and compiled functions
    """

    x = theano.tensor.matrix('x')
    y = theano.tensor.vector('y', dtype='float32')

    # honour a user-supplied learning rate; previously this was hard-coded to 0.1
    learning_rate = (parameters or {}).get('learning_rate', 0.1)

    n_in = number_of_features
    n_out = 1
    rng = numpy.random.RandomState(1234)
    # Glorot-style uniform initialization, scaled by 4 below as is customary
    # for sigmoid activations
    w_values = numpy.asarray(
        rng.uniform(
            low=-numpy.sqrt(6. / (n_in + n_out)),
            high=numpy.sqrt(6. / (n_in + n_out)),
            size=(n_in, n_out)
        ),
        dtype=theano.config.floatX
    )

    w_values *= 4
    w = theano.shared(value=w_values, name='W', borrow=True)

    b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
    b = theano.shared(value=b_values, name='b', borrow=True)

    activation = theano.tensor.nnet.sigmoid

    # single sigmoid output unit: logistic regression
    output = activation(theano.tensor.dot(x, w) + b)

    cost = theano.tensor.nnet.binary_crossentropy(output.T, y).mean()

    params = [w, b]

    # plain gradient descent on all parameters
    grad_params = [theano.tensor.grad(cost, param) for param in params]

    updates = [(param, param - learning_rate * gparam) for param, gparam in zip(params, grad_params)]

    train_function = theano.function(
        inputs=[x, y],
        outputs=cost,
        updates=updates
    )

    eval_function = theano.function(
        inputs=[x],
        outputs=output
    )

    return State(x, y, params, cost, updates, train_function, eval_function)
101 
102 
def feature_importance(state):
    """
    Return a list containing the feature importances.

    This toy model provides no importance estimate, so the result is always
    an empty list.
    """
    return []
108 
109 
def load(obj):
    """
    Recreate a State from the serialized representation produced by end_fit.

    @param obj: sequence whose first entry is the compiled evaluation function
    @return: State carrying only the evaluation function
    """
    return State(eval_function=obj[0])
113 
114 
def apply(state, X):
    """
    Apply the estimator to a matrix of feature vectors.

    @param state: State holding the compiled evaluation function
    @param X: 2d array of feature vectors
    @return: aligned, writable, C-contiguous float32 numpy array of outputs
    """
    predictions = state.eval_function(X)
    # hand the caller a well-behaved float32 buffer regardless of what the
    # evaluation function returned
    return numpy.require(predictions, dtype=numpy.float32, requirements=['A', 'W', 'C', 'O'])
118 
119 
def begin_fit(state, Xvalid, Svalid, yvalid, wvalid, nBatches):
    """
    Prepare the state for fitting.

    Nothing needs to be initialised for this example, so the validation
    sample and batch count are ignored and the state is passed through
    unchanged.
    """
    return state
122 
123 
def partial_fit(state, X, S, y, w, epoch, batch):
    """
    Perform one gradient step on a single batch and report the progress.

    @param X: batch of feature vectors
    @param y: batch of targets (its length normalises the reported cost)
    @return: True, signalling that the fit should continue
    """
    batch_cost = state.train_function(X, y)
    avg_cost = batch_cost / len(y)
    print("Epoch:", f'{int(epoch):04}', "Batch:", f'{int(batch):04}', "cost=", f"{avg_cost:.9f}")
    return True
128 
129 
def end_fit(state):
    """
    Serialize the trained estimator: only the compiled evaluation function
    is kept, wrapped in a list (the counterpart of load()).
    """
    # FIXME: this might not work as intended
    # a better method can be found at: http://deeplearning.net/software/theano/tutorial/loading_and_saving.html
    return [state.eval_function]
# NOTE(review): the lines below are documentation-index residue from an
# automated extraction (member summaries for the State class); they are kept
# as comments so the file remains valid Python.
#   State.x              -- theano input variable x
#   State.y              -- theano input variable y
#   State.train_function -- theano function for training
#   State.eval_function  -- theano function for evaluation
#   State.__init__(self, x=None, y=None, params=None, cost=None,
#                  updates=None, train_function=None, eval_function=None)