from tensorflow.keras import activations
from tensorflow.keras import backend as K
from tensorflow.keras.activations import sigmoid, tanh
from tensorflow.keras.layers import Layer, Reshape
    This is a class which implements a Relational Layer in Keras.
    A Relational Layer compares every combination of two feature groups with shared weights.
    Use this class like every other Layer in Keras.
    Relevant paper: https://arxiv.org/abs/1706.01427
    RN(O) = f_phi(sum(g_theta(o_i, o_j)))
    For flexibility reasons only the part g_theta(o_i, o_j) is modelled here;
    f_phi corresponds to an MLP net.
    To sum over all permutations please use GlobalAveragePooling1D from keras.
30 def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
54 Build all weights for Relations Layer
55 :param input_shape: Input shape of tensor
59 assert(len(input_shape) == 3)
69 for i
in range(len(dense_shape[:-1])):
70 weights = self.add_weight(name=f
'relation_weights_{i}',
71 shape=list(dense_shape[i:i + 2]), initializer=
'glorot_uniform', trainable=
True)
72 bias = self.add_weight(name=f
'relation_weights_{i}',
73 shape=(dense_shape[i + 1],), initializer=
'zeros', trainable=
True)
77 super().
build(input_shape)
81 Compute Relational Layer
82 :param inputs: input tensor
83 :return: output tensor
85 input_groups = [inputs[:, i, :] for i
in range(self.
number_groups)]
87 for index, group1
in enumerate(input_groups[:-1]):
88 for group2
in input_groups[index + 1:]:
89 net = K.dot(K.concatenate([group1, group2]), self.
weightvariables[0][0])
93 net = K.dot(net, variables[0])
94 net = K.bias_add(net, variables[1])
95 outputs.append(sigmoid(net))
97 flat_result = K.concatenate(outputs)
103 :return: Output shape
106 assert(len(input_shape) == 3)
114 Config required for saving parameters
in keras model.
119 'activation': activations.serialize(self.
activation)
122 return dict(list(base_config.items()) + list(config.items()))
    This is a class which implements a Relational Layer in Keras.
    See class Relations for details.
    EnhancedRelations uses an additional input for passing event information
    to every comparison:
    RN(O) = f_phi(sum(g_theta(o_i, o_j, q)))
    q is fed in as a second, one-dimensional input.
134 def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
160 Build all weights for Relations Layer
161 :param input_shape: Input shape of tensor
165 assert(len(input_shape) == 2)
167 assert(len(input_shape[0]) == 3)
169 assert(len(input_shape[1]) == 2)
181 for i
in range(len(dense_shape[:-1])):
182 weights = self.add_weight(name=f
'relation_weights_{i}',
183 shape=list(dense_shape[i:i + 2]), initializer=
'glorot_uniform', trainable=
True)
184 bias = self.add_weight(name=f
'relation_weights_{i}',
185 shape=(dense_shape[i + 1],), initializer=
'zeros', trainable=
True)
189 super().
build(input_shape)
193 Compute Relational Layer
194 :param inputs: input tensor
195 :return: output tensor
197 input_groups = [inputs[0][:, i, :] for i
in range(self.
number_groups)]
198 questions = inputs[1]
200 for index, group1
in enumerate(input_groups[:-1]):
201 for group2
in input_groups[index + 1:]:
202 net = K.dot(K.concatenate([group1, group2, questions]), self.
weightvariables[0][0])
206 net = K.dot(net, variables[0])
207 net = K.bias_add(net, variables[1])
208 outputs.append(sigmoid(net))
210 flat_result = K.concatenate(outputs)
216 :return: Output shape
219 assert(len(input_shape) == 2)
221 assert(len(input_shape[0]) == 3)
223 assert(len(input_shape[1]) == 2)
231 Config required for saving parameters
in keras model.
236 'activation': activations.serialize(self.
activation)
239 return dict(list(base_config.items()) + list(config.items()))
weightvariables
saves weights for call
number_features
Number of features.
def build(self, input_shape)
question_len
size of second input vector
hidden_feature_shape
shape of hidden layers used for extracting relations
combinations
number of relation combinations
number_groups
Number of groups in input.
def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
group_len
how many neurons has one comparable object
def compute_output_shape(self, input_shape)
activation
activation used for hidden layer in shared weights.
weightvariables
saves weights for call
number_features
Number of features.
def build(self, input_shape)
hidden_feature_shape
shape of hidden layers used for extracting relations
combinations
number of relation combinations
number_groups
Number of groups in input.
def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
group_len
how many neurons has one comparable object
def compute_output_shape(self, input_shape)
activation
activation used for hidden layer in shared weights.