#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# Belle II Software  release-06-02-00
# keras_relational.py
import numpy as np

from keras import activations
from keras import backend as K
from keras.activations import sigmoid, tanh
from keras.engine.topology import Layer
from keras.layers.core import Reshape
18 
19 
class Relations(Layer):
    """
    This is a class which implements Relational Layer into Keras.
    Relational Layer compares every combination of two feature groups with shared weights.
    Use this class as every other Layer in Keras.
    Relevant Paper: https://arxiv.org/abs/1706.01427
    RN(O) = f_phi(sum_phi(g_theta(o_i,o_j)))
    For flexibility reason only the part g(o_i,o_j) is modelled
    f_phi corresponds to a MLP net
    To sum over all permutations please use GlobalAveragePooling1D from keras.
    """

    def __init__(self, number_features, hidden_feature_shape=None, activation=tanh, **kwargs):
        """
        Init class.
        :param number_features: number of features extracted for every pair of groups
        :param hidden_feature_shape: shape of the hidden layers of the shared MLP g_theta
            (defaults to [30, 30, 30, 30]; ``None`` avoids a mutable default argument)
        :param activation: activation used for the hidden layers of g_theta
        """
        # Number of features produced per group pair.
        self.number_features = number_features
        # Number of groups in the input; determined in build() from the input shape.
        self.number_groups = 0
        # Shape of hidden layers used for extracting relations.
        self.hidden_feature_shape = [30, 30, 30, 30] if hidden_feature_shape is None else hidden_feature_shape
        # Activation used for the hidden layers of the shared weights.
        self.activation = activations.get(activation)
        # How many neurons one comparable object has; determined in build().
        self.group_len = 0
        # [weights, bias] pairs of the shared MLP; filled in build(), used in call().
        self.weightvariables = []
        # Number of pairwise group combinations; determined in build().
        self.combinations = 0

        super(Relations, self).__init__(**kwargs)

    def build(self, input_shape):
        """
        Build all weights for Relations Layer
        :param input_shape: Input shape of tensor
        :return: Nothing
        """
        # Only accept 2D layers: (batch, number_groups, group_len).
        assert(len(input_shape) == 3)

        self.number_groups = input_shape[1]
        self.group_len = input_shape[2]

        # Number of unordered pairs of groups: n choose 2.
        # (Replaces the deprecated np.math.factorial construction.)
        self.combinations = np.int32(self.number_groups * (self.number_groups - 1) // 2)

        # Layer sizes of the shared MLP g_theta: input is the concatenation of
        # two groups, output has number_features neurons.
        dense_shape = [2 * self.group_len] + self.hidden_feature_shape + [self.number_features]

        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            # NOTE: the bias previously reused the 'relation_weights_{}' name,
            # clashing with the kernel variable; give it a distinct name.
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

            self.weightvariables.append([weights, bias])

        super(Relations, self).build(input_shape)

    def call(self, inputs):
        """
        Compute Relational Layer
        :param inputs: input tensor
        :return: output tensor
        """
        # Split the input into its feature groups.
        input_groups = [inputs[:, i, :] for i in range(self.number_groups)]
        outputs = []
        # Apply the shared MLP g_theta to every unordered pair of groups.
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                net = K.dot(K.concatenate([group1, group2]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)

    def compute_output_shape(self, input_shape):
        """
        Compute Output shape
        :return: Output shape
        """
        # Only 2D layers: (batch, number_groups, group_len).
        assert(len(input_shape) == 3)

        # Derive the pair count from input_shape directly so this is correct
        # even if called before build().
        self.combinations = np.int32(input_shape[1] * (input_shape[1] - 1) // 2)

        return (input_shape[0], self.combinations, self.number_features)

    def get_config(self):
        """
        Config required for saving parameters in keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(Relations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
125 
126 
class EnhancedRelations(Layer):
    """
    This is a class which implements Relational Layer into Keras.
    See Class Relations for details.
    EnhanceRelations use an additional input for passing event information to every comparison:
    RN(O) = f_phi(sum_phi(g_theta(o_i,o_j,q)))
    q is fed in as second one dimensional input.
    """

    def __init__(self, number_features, hidden_feature_shape=None, activation=tanh, **kwargs):
        """
        Init class.
        :param number_features: number of features extracted for every pair of groups
        :param hidden_feature_shape: shape of the hidden layers of the shared MLP g_theta
            (defaults to [30, 30, 30, 30]; ``None`` avoids a mutable default argument)
        :param activation: activation used for the hidden layers of g_theta
        """
        # Number of features produced per group pair.
        self.number_features = number_features
        # Number of groups in the input; determined in build() from the input shape.
        self.number_groups = 0
        # Shape of hidden layers used for extracting relations.
        self.hidden_feature_shape = [30, 30, 30, 30] if hidden_feature_shape is None else hidden_feature_shape
        # Activation used for the hidden layers of the shared weights.
        self.activation = activations.get(activation)
        # How many neurons one comparable object has; determined in build().
        self.group_len = 0
        # [weights, bias] pairs of the shared MLP; filled in build(), used in call().
        # Named weightvariables (consistent with Relations) — 'variables' is a
        # read-only property on modern Keras Layer implementations.
        self.weightvariables = []
        # Number of pairwise group combinations; determined in build().
        self.combinations = 0
        # Size of the second (question) input vector; determined in build().
        self.question_len = 0

        super(EnhancedRelations, self).__init__(**kwargs)

    def build(self, input_shape):
        """
        Build all weights for Relations Layer
        :param input_shape: Input shape of tensor
        :return: Nothing
        """
        # Accept only 2 inputs: the grouped features and the question vector.
        assert(len(input_shape) == 2)
        # First input should be a 2D layer: (batch, number_groups, group_len).
        assert(len(input_shape[0]) == 3)
        # Second input should be a 1D layer: (batch, question_len).
        assert(len(input_shape[1]) == 2)

        self.number_groups = input_shape[0][1]
        self.group_len = input_shape[0][2]
        self.question_len = input_shape[1][1]

        # Number of unordered pairs of groups: n choose 2.
        # (Replaces the deprecated np.math.factorial construction.)
        self.combinations = np.int32(self.number_groups * (self.number_groups - 1) // 2)

        # Layer sizes of the shared MLP g_theta: input is two groups plus the
        # question vector, output has number_features neurons.
        dense_shape = [2 * self.group_len + self.question_len] + self.hidden_feature_shape + [self.number_features]

        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            # NOTE: the bias previously reused the 'relation_weights_{}' name,
            # clashing with the kernel variable; give it a distinct name.
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

            self.weightvariables.append([weights, bias])

        super(EnhancedRelations, self).build(input_shape)

    def call(self, inputs):
        """
        Compute Relational Layer
        :param inputs: input tensor
        :return: output tensor
        """
        # Split the first input into its feature groups.
        input_groups = [inputs[0][:, i, :] for i in range(self.number_groups)]
        # Second input: the per-event question vector q, appended to every pair.
        questions = inputs[1]
        outputs = []
        # Apply the shared MLP g_theta to every unordered pair of groups.
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                net = K.dot(K.concatenate([group1, group2, questions]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)

    def compute_output_shape(self, input_shape):
        """
        Compute Output shape
        :return: Output shape
        """
        # Accept only 2 inputs.
        assert(len(input_shape) == 2)
        # First input should be a 2D layer.
        assert(len(input_shape[0]) == 3)
        # Second input should be a 1D layer.
        assert(len(input_shape[1]) == 2)

        # Derive the pair count from input_shape directly so this is correct
        # even if called before build().
        self.combinations = np.int32(input_shape[0][1] * (input_shape[0][1] - 1) // 2)

        return (input_shape[0][0], self.combinations, self.number_features)

    def get_config(self):
        """
        Config required for saving parameters in keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(EnhancedRelations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
# --------------------------------------------------------------------------
# Attribute reference (recovered from garbled documentation residue):
#
#   number_features      -- Number of features.
#   number_groups        -- Number of groups in input.
#   group_len            -- how many neurons has one comparable object
#   hidden_feature_shape -- shape of hidden layers used for extracting relations
#   combinations         -- number of relation combinations
#   activation           -- activation used for hidden layer in shared weights.
#   weightvariables      -- saves weights for call
#   question_len         -- size of second input vector (EnhancedRelations only)
#
# Methods: __init__(number_features, hidden_feature_shape=[30, 30, 30, 30],
# activation=tanh, **kwargs), build(input_shape), compute_output_shape(input_shape)
# --------------------------------------------------------------------------