Belle II Software  light-2212-foldex
keras_relational.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


from tensorflow.keras.layers import Layer, Reshape
from tensorflow.keras import activations
from tensorflow.keras.activations import sigmoid, tanh
from tensorflow.keras import backend as K
import numpy as np


class Relations(Layer):
    """
    This class implements a Relational Layer in Keras.
    A Relational Layer compares every combination of two feature groups using shared weights.
    Use this class like any other Layer in Keras.
    Relevant paper: https://arxiv.org/abs/1706.01427
    RN(O) = f_phi(sum_{i,j} g_theta(o_i, o_j))
    For flexibility, only the g_theta(o_i, o_j) part is modelled here;
    f_phi corresponds to an MLP net.
    To sum over all combinations, use GlobalAveragePooling1D from Keras.
    """

    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
        """
        Init class.
        """

        #: Number of features (shared weights used for each comparison).
        self.number_features = number_features

        #: Number of groups in the input.
        self.number_groups = 0

        #: Shape of the hidden layers used for extracting relations.
        self.hidden_feature_shape = hidden_feature_shape

        #: Activation used for the hidden layers in the shared weights.
        self.activation = activations.get(activation)

        #: How many neurons one comparable object (group) has.
        self.group_len = 0

        #: Saves the weights for call.
        self.weightvariables = []

        #: Number of relation combinations.
        self.combinations = 0

        super(Relations, self).__init__(**kwargs)

    def build(self, input_shape):
        """
        Build all weights for Relations Layer
        :param input_shape: Input shape of tensor
        :return: Nothing
        """
        # only accept 2D layers (batch, groups, group_len)
        assert(len(input_shape) == 3)

        self.number_groups = input_shape[1]

        self.group_len = input_shape[2]

        # number of unordered pairs of groups: n! / (2 * (n - 2)!) = n choose 2
        self.combinations = np.int32(np.math.factorial(self.number_groups) / (2 * np.math.factorial(self.number_groups - 2)))

        dense_shape = [2 * self.group_len] + self.hidden_feature_shape + [self.number_features]

        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

            self.weightvariables.append([weights, bias])

        super(Relations, self).build(input_shape)

    def call(self, inputs):
        """
        Compute Relational Layer
        :param inputs: input tensor
        :return: output tensor
        """
        input_groups = [inputs[:, i, :] for i in range(self.number_groups)]
        outputs = []
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                # shared MLP g_theta applied to the concatenated pair of groups
                net = K.dot(K.concatenate([group1, group2]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)

    def compute_output_shape(self, input_shape):
        """
        Compute Output shape
        :return: Output shape
        """
        # only 2D layers
        assert(len(input_shape) == 3)

        self.combinations = np.int32(np.math.factorial(self.number_groups) / (2 * np.math.factorial(self.number_groups - 2)))

        return (input_shape[0], self.combinations, self.number_features)

    def get_config(self):
        """
        Config required for saving parameters in keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(Relations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


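# A minimal usage sketch (not part of the original module; shapes and layer sizes are
# illustrative assumptions): the Relations layer is applied like any other Keras layer
# and followed by GlobalAveragePooling1D to aggregate over all pair combinations,
# with a small Dense head playing the role of f_phi.
#
#   from tensorflow.keras.layers import Input, GlobalAveragePooling1D, Dense
#   from tensorflow.keras.models import Model
#
#   groups = Input(shape=(5, 10))                # 5 groups with 10 features each
#   rel = Relations(number_features=20)(groups)  # output shape: (10 combinations, 20)
#   pooled = GlobalAveragePooling1D()(rel)       # aggregate over the combinations
#   output = Dense(1, activation='sigmoid')(pooled)
#   model = Model(groups, output)

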
class EnhancedRelations(Layer):
    """
    This class implements a Relational Layer in Keras.
    See the Relations class for details.
    EnhancedRelations uses an additional input to pass event information to every comparison:
    RN(O) = f_phi(sum_{i,j} g_theta(o_i, o_j, q))
    q is fed in as a second, one-dimensional input.
    """

    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
        """
        Init class.
        """

        #: Number of features (shared weights used for each comparison).
        self.number_features = number_features

        #: Number of groups in the input.
        self.number_groups = 0

        #: Shape of the hidden layers used for extracting relations.
        self.hidden_feature_shape = hidden_feature_shape

        #: Activation used for the hidden layers in the shared weights.
        self.activation = activations.get(activation)

        #: How many neurons one comparable object (group) has.
        self.group_len = 0

        #: Saves the weights for call.
        self.weightvariables = []

        #: Number of relation combinations.
        self.combinations = 0

        #: Size of the second input vector.
        self.question_len = 0

        super(EnhancedRelations, self).__init__(**kwargs)

    def build(self, input_shape):
        """
        Build all weights for Relations Layer
        :param input_shape: Input shape of tensor
        :return: Nothing
        """
        # accept only 2 inputs
        assert(len(input_shape) == 2)
        # first input should be a 2D layer
        assert(len(input_shape[0]) == 3)
        # second input should be a 1D layer
        assert(len(input_shape[1]) == 2)

        self.number_groups = input_shape[0][1]

        self.group_len = input_shape[0][2]

        self.question_len = input_shape[1][1]

        # number of unordered pairs of groups: n! / (2 * (n - 2)!) = n choose 2
        self.combinations = np.int32(np.math.factorial(self.number_groups) / (2 * np.math.factorial(self.number_groups - 2)))

        dense_shape = [2 * self.group_len + self.question_len] + self.hidden_feature_shape + [self.number_features]

        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

            self.weightvariables.append([weights, bias])

        super(EnhancedRelations, self).build(input_shape)

    def call(self, inputs):
        """
        Compute Relational Layer
        :param inputs: input tensor
        :return: output tensor
        """
        input_groups = [inputs[0][:, i, :] for i in range(self.number_groups)]
        questions = inputs[1]
        outputs = []
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                # shared MLP g_theta applied to the pair of groups plus the question vector
                net = K.dot(K.concatenate([group1, group2, questions]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)

    def compute_output_shape(self, input_shape):
        """
        Compute Output shape
        :return: Output shape
        """
        # accept only 2 inputs
        assert(len(input_shape) == 2)
        # first input should be a 2D layer
        assert(len(input_shape[0]) == 3)
        # second input should be a 1D layer
        assert(len(input_shape[1]) == 2)

        self.combinations = np.int32(np.math.factorial(self.number_groups) / (2 * np.math.factorial(self.number_groups - 2)))

        return (input_shape[0][0], self.combinations, self.number_features)

    def get_config(self):
        """
        Config required for saving parameters in keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(EnhancedRelations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
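

# A minimal usage sketch for EnhancedRelations (not part of the original module; shapes
# are illustrative assumptions): the layer takes a list of two inputs, the grouped
# features and the one-dimensional question/event vector q, which is concatenated into
# every pairwise comparison.
#
#   from tensorflow.keras.layers import Input, GlobalAveragePooling1D, Dense
#   from tensorflow.keras.models import Model
#
#   groups = Input(shape=(5, 10))      # 5 groups with 10 features each
#   question = Input(shape=(7,))       # event-level information q
#   rel = EnhancedRelations(number_features=20)([groups, question])
#   pooled = GlobalAveragePooling1D()(rel)
#   output = Dense(1, activation='sigmoid')(pooled)
#   model = Model([groups, question], output)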