from tensorflow.keras.layers import Layer, Reshape
from tensorflow.keras import activations
from tensorflow.keras.activations import sigmoid, tanh
from tensorflow.keras import backend as K
class Relations(Layer):
    """
    This is a class which implements a Relational Layer in Keras.
    The Relational Layer compares every combination of two feature groups with shared weights.
    Use this class like every other Layer in Keras.
    Relevant paper: https://arxiv.org/abs/1706.01427
    RN(O) = f_phi(sum_{i,j} g_theta(o_i, o_j))
    For flexibility reasons only the part g_theta(o_i, o_j) is modelled here;
    f_phi corresponds to an MLP net.
    To sum over all combinations please use GlobalAveragePooling1D from Keras after this layer.
    """
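    # A minimal usage sketch (not part of the original file; shapes and layer sizes
    # are assumptions): the layer expects an input of shape (batch, number_groups,
    # group_len) and its output can be pooled over all combinations, e.g.
    #
    #   from tensorflow.keras.layers import Input, GlobalAveragePooling1D, Dense
    #   from tensorflow.keras.models import Model
    #   inp = Input(shape=(10, 4))                    # 10 groups with 4 features each
    #   rel = Relations(number_features=16)(inp)      # -> (batch, combinations, 16)
    #   pooled = GlobalAveragePooling1D()(rel)        # aggregate over all combinations
    #   out = Dense(1, activation='sigmoid')(pooled)  # f_phi as a small MLP head
    #   model = Model(inp, out)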
    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
        super(Relations, self).__init__(**kwargs)
    def build(self, input_shape):
        """
        Build all weights for the Relations Layer.
        :param input_shape: input shape of the tensor
        """
        # expects an input of shape (batch, number_groups, group_len)
        assert len(input_shape) == 3
        # one dense (weight, bias) pair per layer of the shared relation network
        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)
            self.weightvariables.append([weights, bias])
        super(Relations, self).build(input_shape)
    def call(self, inputs):
        """
        Compute the Relational Layer.
        :param inputs: input tensor
        :return: output tensor
        """
        input_groups = [inputs[:, i, :] for i in range(self.number_groups)]
        outputs = []
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                # first shared layer applied to the concatenated pair (o_i, o_j)
                net = K.dot(K.concatenate([group1, group2]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                # remaining shared hidden layers
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)
    def compute_output_shape(self, input_shape):
        """
        :return: output shape
        """
        assert len(input_shape) == 3
        return (input_shape[0], self.combinations, self.number_features)
    def get_config(self):
        """
        Config required for saving parameters in a Keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(Relations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
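# A minimal save/load sketch (not part of the original file; the file name is
# hypothetical): since these are custom layers, they have to be registered via
# custom_objects when the saved model is loaded again, e.g.
#
#   from tensorflow.keras.models import load_model
#   model.save('relations_model.h5')
#   model = load_model('relations_model.h5',
#                      custom_objects={'Relations': Relations,
#                                      'EnhancedRelations': EnhancedRelations})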
class EnhancedRelations(Layer):
    """
    This is a class which implements a Relational Layer in Keras.
    See the class Relations for details.
    EnhancedRelations uses an additional input for passing event information to every comparison:
    RN(O) = f_phi(sum_{i,j} g_theta(o_i, o_j, q))
    q is fed in as a second, one-dimensional input.
    """
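    # A minimal usage sketch (not part of the original file; shapes and layer sizes
    # are assumptions): the layer takes a list of two inputs, the grouped features
    # and the one-dimensional question vector q, e.g.
    #
    #   from tensorflow.keras.layers import Input, GlobalAveragePooling1D, Dense
    #   from tensorflow.keras.models import Model
    #   groups = Input(shape=(10, 4))    # 10 groups with 4 features each
    #   question = Input(shape=(8,))     # event-level information q
    #   rel = EnhancedRelations(number_features=16)([groups, question])
    #   pooled = GlobalAveragePooling1D()(rel)
    #   out = Dense(1, activation='sigmoid')(pooled)
    #   model = Model([groups, question], out)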
    def __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs):
        super(EnhancedRelations, self).__init__(**kwargs)
    def build(self, input_shape):
        """
        Build all weights for the Relations Layer.
        :param input_shape: input shape of the tensor
        """
        # expects two inputs: (batch, number_groups, group_len) and (batch, question_len)
        assert len(input_shape) == 2
        assert len(input_shape[0]) == 3
        assert len(input_shape[1]) == 2
        self.group_len = input_shape[0][2]
        # one dense (weight, bias) pair per layer of the shared relation network
        for i in range(len(dense_shape[:-1])):
            weights = self.add_weight(name='relation_weights_{}'.format(i),
                                      shape=list(dense_shape[i:i + 2]), initializer='glorot_uniform', trainable=True)
            bias = self.add_weight(name='relation_bias_{}'.format(i),
                                   shape=(dense_shape[i + 1],), initializer='zeros', trainable=True)
            self.weightvariables.append([weights, bias])
        super(EnhancedRelations, self).build(input_shape)
    def call(self, inputs):
        """
        Compute the Relational Layer.
        :param inputs: input tensor
        :return: output tensor
        """
        input_groups = [inputs[0][:, i, :] for i in range(self.number_groups)]
        questions = inputs[1]
        outputs = []
        for index, group1 in enumerate(input_groups[:-1]):
            for group2 in input_groups[index + 1:]:
                # first shared layer applied to the concatenated triple (o_i, o_j, q)
                net = K.dot(K.concatenate([group1, group2, questions]), self.weightvariables[0][0])
                net = K.bias_add(net, self.weightvariables[0][1])
                # remaining shared hidden layers
                for variables in self.weightvariables[1:]:
                    net = self.activation(net)
                    net = K.dot(net, variables[0])
                    net = K.bias_add(net, variables[1])
                outputs.append(sigmoid(net))

        flat_result = K.concatenate(outputs)
        return Reshape((self.combinations, self.number_features,))(flat_result)
    def compute_output_shape(self, input_shape):
        """
        :return: output shape
        """
        assert len(input_shape) == 2
        assert len(input_shape[0]) == 3
        assert len(input_shape[1]) == 2
        return (input_shape[0][0], self.combinations, self.number_features)
    def get_config(self):
        """
        Config required for saving parameters in a Keras model.
        """
        config = {
            'number_features': self.number_features,
            'hidden_feature_shape': self.hidden_feature_shape,
            'activation': activations.serialize(self.activation)
        }
        base_config = super(EnhancedRelations, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
# Members of EnhancedRelations:
#   number_features      -- number of features
#   number_groups        -- number of groups in the input
#   group_len            -- how many neurons one comparable object has
#   question_len         -- size of the second input vector
#   combinations         -- number of relation combinations
#   hidden_feature_shape -- shape of the hidden layers used for extracting relations
#   activation           -- activation used for the hidden layers in the shared weights
#   weightvariables      -- saves the weights for call()
# Methods:
#   __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
#   build(self, input_shape)
#   compute_output_shape(self, input_shape)
# Members of Relations:
#   number_features      -- number of features
#   number_groups        -- number of groups in the input
#   group_len            -- how many neurons one comparable object has
#   combinations         -- number of relation combinations
#   hidden_feature_shape -- shape of the hidden layers used for extracting relations
#   activation           -- activation used for the hidden layers in the shared weights
#   weightvariables      -- saves the weights for call()
# Methods:
#   __init__(self, number_features, hidden_feature_shape=[30, 30, 30, 30], activation=tanh, **kwargs)
#   build(self, input_shape)
#   compute_output_shape(self, input_shape)