Belle II Software  release-06-02-00
theano.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


try:
    import theano
    import theano.tensor
except ImportError:
    print("Please install theano: pip3 install theano")
    import sys
    sys.exit(1)

import numpy


class State(object):
    """
    State class for proper handling of parameters and data during function calls. This is a very brief theano example.
    """

    def __init__(self, x=None, y=None, params=None, cost=None, updates=None, train_function=None, eval_function=None):
        """
        Constructor of the State class
        """
        # TODO: make serializable with __getstate__(), __setstate__()

        #: theano shared variable x
        self.x = x
        #: theano shared variable y
        self.y = y
        #: model parameters (weights and bias)
        self.params = params
        #: symbolic cost expression
        self.cost = cost
        #: parameter update rules
        self.updates = updates
        #: theano function for training
        self.train_function = train_function
        #: theano function for evaluation
        self.eval_function = eval_function

def get_model(number_of_features, number_of_spectators, number_of_events, training_fraction, parameters):
    """
    Build a simple logistic regression model in theano and return it wrapped in a State object
    """
    x = theano.tensor.matrix('x')
    y = theano.tensor.vector('y', dtype='float32')

    # learning_rate = parameters.get('learning_rate', 0.1)
    learning_rate = 0.1

    n_in = number_of_features
    n_out = 1
    rng = numpy.random.RandomState(1234)
    # initialize the weights uniformly in +- sqrt(6 / (n_in + n_out))
    w_values = numpy.asarray(
        rng.uniform(
            low=-numpy.sqrt(6. / (n_in + n_out)),
            high=numpy.sqrt(6. / (n_in + n_out)),
            size=(n_in, n_out)
        ),
        dtype=theano.config.floatX
    )

    # scale up by 4, as commonly recommended for sigmoid units
    w_values *= 4
    w = theano.shared(value=w_values, name='W', borrow=True)

    b_values = numpy.zeros((n_out,), dtype=theano.config.floatX)
    b = theano.shared(value=b_values, name='b', borrow=True)

    activation = theano.tensor.nnet.sigmoid

    output = activation(theano.tensor.dot(x, w) + b)

    cost = theano.tensor.nnet.binary_crossentropy(output.T, y).mean()

    params = [w, b]

    grad_params = [theano.tensor.grad(cost, param) for param in params]

    # plain gradient-descent updates for all parameters
    updates = [(param, param - learning_rate * gparam) for param, gparam in zip(params, grad_params)]

    train_function = theano.function(
        inputs=[x, y],
        outputs=cost,
        updates=updates
    )

    eval_function = theano.function(
        inputs=[x],
        outputs=output
    )

    return State(x, y, params, cost, updates, train_function, eval_function)


def feature_importance(state):
    """
    Return a list containing the feature importances
    """
    return []


def load(obj):
    """
    Recreate a State object from the list returned by end_fit
    """
    state = State(eval_function=obj[0])
    return state


def apply(state, X):
    """
    Apply the trained model to the data X and return the network output
    """
    result = state.eval_function(X)
    return numpy.require(result, dtype=numpy.float32, requirements=['A', 'W', 'C', 'O'])


def begin_fit(state, Xvalid, Svalid, yvalid, wvalid):
    """
    Called before the training starts; this example does not need any preparation
    """
    return state


def partial_fit(state, X, S, y, w, epoch):
    """
    Run one training step on the batch X, y and print the average cost
    """
    avg_cost = state.train_function(X, y) / len(y)
    print("Epoch:", '%04d' % (epoch), "cost=", "{:.9f}".format(avg_cost))
    return True


def end_fit(state):
    """
    Return the parts of the state that have to be serialized
    """
    # FIXME: this might not work as intended
    # a better method can be found at: http://deeplearning.net/software/theano/tutorial/loading_and_saving.html
    return [state.eval_function]