neural_tensor_layer.py
#!/usr/bin/python
# Keras layer implementing a Neural Tensor Network (NTN) style combination of
# two input embeddings: a bilinear tensor product plus a feed-forward term,
# passed through a tanh non-linearity.
import scipy.stats as stats
from keras import backend as K
from keras.engine.topology import Layer
class NeuralTensorLayer(Layer):
    def __init__(self, output_dim, input_dim=None, **kwargs):
        self.output_dim = output_dim  # k: number of tensor slices
        self.input_dim = input_dim    # d: dimensionality of each input embedding
        if self.input_dim:
            kwargs['input_shape'] = (self.input_dim,)
        super(NeuralTensorLayer, self).__init__(**kwargs)
    def build(self, input_shape):
        mean = 0.0
        std = 1.0
        # W: k x d x d bilinear tensor, V: 2d x k feed-forward weights, both
        # drawn from a truncated normal clipped at two standard deviations.
        k = self.output_dim
        d = self.input_dim
        initial_W_values = stats.truncnorm.rvs(-2 * std, 2 * std, loc=mean, scale=std, size=(k, d, d))
        initial_V_values = stats.truncnorm.rvs(-2 * std, 2 * std, loc=mean, scale=std, size=(2 * d, k))
        self.W = K.variable(initial_W_values)
        self.V = K.variable(initial_V_values)
        self.b = K.zeros((self.input_dim,))  # d-dimensional bias added inside each slice
        self.trainable_weights = [self.W, self.V, self.b]
        super(NeuralTensorLayer, self).build(input_shape)
    def call(self, inputs, mask=None):
        if type(inputs) is not list or len(inputs) <= 1:
            raise Exception('NeuralTensorLayer must be called on a list of tensors '
                            '(at least 2). Got: ' + str(inputs))
        e1 = inputs[0]  # (batch_size, d)
        e2 = inputs[1]  # (batch_size, d)
        batch_size = K.shape(e1)[0]
        k = self.output_dim
        # Feed-forward term: V [e1; e2], shape (batch_size, k).
        feed_forward_product = K.dot(K.concatenate([e1, e2]), self.V)
        # Bilinear tensor products: one value per sample for each of the k slices.
        bilinear_tensor_products = [K.sum((e2 * K.dot(e1, self.W[0])) + self.b, axis=1)]
        for i in range(1, k):
            btp = K.sum((e2 * K.dot(e1, self.W[i])) + self.b, axis=1)
            bilinear_tensor_products.append(btp)
        # The slice outputs are concatenated along axis 0, so reshape to
        # (k, batch_size) and transpose to obtain the (batch_size, k) layout.
        bilinear_tensor = K.transpose(K.reshape(K.concatenate(bilinear_tensor_products, axis=0), (k, batch_size)))
        result = K.tanh(bilinear_tensor + feed_forward_product)
        return result
    def compute_output_shape(self, input_shape):
        # input_shape is a list of input shapes, one per tensor in `inputs`.
        batch_size = input_shape[0][0]
        return (batch_size, self.output_dim)
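

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original layer definition). It assumes
# 32-dimensional entity embeddings and 4 tensor slices; EMBED_DIM, N_SLICES and
# the random inputs below are illustrative placeholders. It only checks that
# the layer wires into a Keras functional model and produces outputs of the
# expected (batch_size, k) shape.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np
    from keras.layers import Input
    from keras.models import Model

    EMBED_DIM = 32  # d: dimensionality of each input embedding (assumed)
    N_SLICES = 4    # k: number of tensor slices (assumed)

    e1 = Input(shape=(EMBED_DIM,))
    e2 = Input(shape=(EMBED_DIM,))
    ntn = NeuralTensorLayer(output_dim=N_SLICES, input_dim=EMBED_DIM)([e1, e2])
    model = Model(inputs=[e1, e2], outputs=ntn)

    x1 = np.random.rand(8, EMBED_DIM).astype('float32')
    x2 = np.random.rand(8, EMBED_DIM).astype('float32')
    print(model.predict([x1, x2]).shape)  # expected: (8, 4)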