from keras.layers.core import Reshape
from keras import activations
from keras.activations import sigmoid, tanh
from keras.engine.topology import Layer
from keras import backend as K
This is a class which implements a Relational Layer in Keras.
The Relational Layer compares every combination of two feature groups with shared weights.
Use this class like any other Layer in Keras.
Relevant paper: https://arxiv.org/abs/1706.01427
RN(O) = f_phi(sum_ij(g_theta(o_i, o_j)))
For flexibility reasons, only the part g_theta(o_i, o_j) is modelled here;
f_phi corresponds to an MLP net.
To sum over all pairwise comparisons, please use GlobalAveragePooling1D from Keras
(see the usage sketch after this class).
def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
super(Relations, self).__init__(**kwargs)
Build all weights for the Relations layer.
:param input_shape: Input shape of the tensor

# the input must be 3D: (batch, number_groups, group_len)
assert(len(input_shape) == 3)
for i in range(len(dense_shape[:-1])):
    weights = self.add_weight(name='relation_weights_{}'.format(i),
                              shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
    # give the bias its own name so it does not collide with the kernel weights above
    bias = self.add_weight(name='relation_bias_{}'.format(i),
                           shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

    self.weightvariables.append([weights, bias])
super(Relations, self).build(input_shape)
Compute Relational Layer
:param inputs: input tensor
:return: output tensor
input_groups = [inputs[:, i, :] for i in range(self.number_groups)]
outputs = []
for index, group1 in enumerate(input_groups[:-1]):
    for group2 in input_groups[index + 1:]:
        # first shared dense layer applied to the concatenated pair of groups
        net = K.dot(K.concatenate([group1, group2]), self.weightvariables[0][0])
        net = K.bias_add(net, self.weightvariables[0][1])
        # remaining shared dense layers of g_theta
        for variables in self.weightvariables[1:]:
            net = self.activation(net)
            net = K.dot(net, variables[0])
            net = K.bias_add(net, variables[1])
        outputs.append(sigmoid(net))
flat_result = K.concatenate(outputs)
:return: Output shape

assert(len(input_shape) == 3)
Config required for saving parameters in a Keras model.

config = {
    'number_features': self.number_features,
    'hidden_feature_shape': self.hidden_feature_shape,
    'activation': activations.serialize(self.activation)
}
base_config = super(Relations, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
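# Minimal usage sketch for Relations, assuming an input of 10 groups with 5 features each;
# the helper name, the shapes and the Dense head are illustrative assumptions only.
# GlobalAveragePooling1D averages over all pairwise comparisons, as recommended in the docstring.
def _relations_usage_example():
    from keras.layers import Input, Dense, GlobalAveragePooling1D
    from keras.models import Model

    inp = Input(shape=(10, 5))                 # (number_groups, group_len) = (10, 5)
    rel = Relations(number_features=20)(inp)   # one comparison per pair of groups
    pooled = GlobalAveragePooling1D()(rel)     # average over all pairwise comparisons
    out = Dense(1, activation='sigmoid')(pooled)
    return Model(inp, out)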
This is a class which implements a Relational Layer in Keras.
See the class Relations for details.
EnhancedRelations uses an additional input for passing event information to every comparison:
RN(O) = f_phi(sum_ij(g_theta(o_i, o_j, q)))
q is fed in as a second, one-dimensional input (see the usage sketch after this class).
def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
super(EnhancedRelations, self).__init__(**kwargs)
Build all weights for the EnhancedRelations layer.
:param input_shape: Input shape of the tensor

# two inputs are expected: the grouped features and the question vector
assert(len(input_shape) == 2)
# first input: 3D tensor (batch, number_groups, group_len)
assert(len(input_shape[0]) == 3)
# second input: 2D tensor (batch, question_len)
assert(len(input_shape[1]) == 2)
self.group_len = input_shape[0][2]
for i in range(len(dense_shape[:-1])):
    weights = self.add_weight(name='relation_weights_{}'.format(i),
                              shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
    # give the bias its own name so it does not collide with the kernel weights above
    bias = self.add_weight(name='relation_bias_{}'.format(i),
                           shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)

    self.variables.append([weights, bias])
super(EnhancedRelations, self).build(input_shape)
Compute Relational Layer
:param inputs: input tensor
:return: output tensor
input_groups = [inputs[0][:, i, :] for i in range(self.number_groups)]
questions = inputs[1]
outputs = []
for index, group1 in enumerate(input_groups[:-1]):
    for group2 in input_groups[index + 1:]:
        # first shared dense layer applied to the pair of groups plus the question vector
        net = K.dot(K.concatenate([group1, group2, questions]), self.variables[0][0])
        net = K.bias_add(net, self.variables[0][1])
        # remaining shared dense layers of g_theta
        for variables in self.variables[1:]:
            net = self.activation(net)
            net = K.dot(net, variables[0])
            net = K.bias_add(net, variables[1])
        outputs.append(sigmoid(net))
flat_result = K.concatenate(outputs)
:return: Output shape

assert(len(input_shape) == 2)
assert(len(input_shape[0]) == 3)
assert(len(input_shape[1]) == 2)
Config required for saving parameters in a Keras model.

config = {
    'number_features': self.number_features,
    'hidden_feature_shape': self.hidden_feature_shape,
    'activation': activations.serialize(self.activation)
}
base_config = super(EnhancedRelations, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
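# Minimal usage sketch for EnhancedRelations, assuming 10 groups with 5 features each and an
# 8-dimensional question vector q; the helper name, shapes and Dense head are illustrative only.
# The layer takes two inputs: the grouped features and q, which is appended to every comparison.
def _enhanced_relations_usage_example():
    from keras.layers import Input, Dense, GlobalAveragePooling1D
    from keras.models import Model

    groups = Input(shape=(10, 5))    # (number_groups, group_len) = (10, 5)
    question = Input(shape=(8,))     # one-dimensional event information q
    rel = EnhancedRelations(number_features=20)([groups, question])
    pooled = GlobalAveragePooling1D()(rel)
    out = Dense(1, activation='sigmoid')(pooled)
    return Model([groups, question], out)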
Members of EnhancedRelations:
    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
    def build(self, input_shape)
    def compute_output_shape(self, input_shape)
    variables: saves the weights for call
    number_features: number of features
    question_len: size of the second input vector
    hidden_feature_shape: shape of the hidden layers used for extracting relations
    combinations: number of relation combinations
    number_groups: number of groups in the input
    group_len: how many neurons one comparable object has
    activation: activation used for the hidden layers in the shared weights

Members of Relations:
    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
    def build(self, input_shape)
    def compute_output_shape(self, input_shape)
    weightvariables: saves the weights for call
    number_features: number of features
    hidden_feature_shape: shape of the hidden layers used for extracting relations
    combinations: number of relation combinations
    number_groups: number of groups in the input
    group_len: how many neurons one comparable object has
    activation: activation used for the hidden layers in the shared weights
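Worked shape example (illustrative, following the pairwise loop in call): with number_groups = 10
the layer compares 10 * 9 / 2 = 45 pairs, so combinations = 45 and each event yields
combinations * number_features output values before any pooling.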