Fix merge conflicts

This commit is contained in:
Francois Chollet 2016-02-25 10:34:35 -08:00
commit ababd95210
5 changed files with 22 additions and 22 deletions

@@ -7,13 +7,9 @@ def softmax(x):
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
# apply softmax to each timestep
def step(x, states):
return K.softmax(x), []
last_output, outputs, states = K.rnn(step, x,
[],
mask=None)
return outputs
e = K.exp(x - K.max(x, axis=-1, keepdims=True))
s = K.sum(e, axis=-1, keepdims=True)
return e / s
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))

@@ -353,6 +353,8 @@ def spatial_2d_padding(x, padding=(1, 1), dim_ordering='th'):
[0, 0]]
return tf.pad(x, pattern)
def pack(x):
return tf.pack(x)
# VALUE MANIPULATION

@@ -415,6 +415,8 @@ def spatial_3d_padding(x, padding=(1, 1, 1), dim_ordering='th'):
raise Exception('Invalid dim_ordering: ' + dim_ordering)
return T.set_subtensor(output[indices], x)
def pack(x):
return T.stack(*x)
# VALUE MANIPULATION

@@ -1145,17 +1145,18 @@ class TimeDistributedDense(MaskedLayer):
return (input_shape[0], input_shape[1], self.output_dim)
def get_output(self, train=False):
X = self.get_input(train)
def step(x, states):
output = K.dot(x, self.W) + self.b
return output, []
last_output, outputs, states = K.rnn(step, X,
initial_states=[],
mask=None)
outputs = self.activation(outputs)
return outputs
X = self.get_input(train) # (samples, timesteps, input_dim)
# Squash samples and timesteps into a single axis
x = K.reshape(X, (-1, self.input_shape[-1])) # (samples * timesteps, input_dim)
Y = K.dot(x, self.W) + self.b # (samples * timesteps, output_dim)
# We have to reshape Y to (samples, timesteps, output_dim)
input_length = self.input_shape[1]
# Note: input_length will always be provided when using tensorflow backend.
if not input_length:
input_length = K.shape(X)[1]
Y = K.reshape(Y, (-1, input_length, self.output_shape[-1])) # (samples, timesteps, output_dim)
Y = self.activation(Y)
return Y
def get_config(self):
config = {'name': self.__class__.__name__,

@@ -149,10 +149,9 @@ class Recurrent(MaskedLayer):
def get_initial_states(self, x):
# build an all-zero tensor of shape (samples, output_dim)
initial_state = K.zeros_like(x) # (samples, timesteps, input_dim)
initial_state = K.sum(initial_state, axis=1) # (samples, input_dim)
reducer = K.zeros((self.input_dim, self.output_dim))
initial_state = K.dot(initial_state, reducer) # (samples, output_dim)
initial_state = x[:, 0, 0] * 0 # (samples, )
initial_state = K.pack([initial_state] * self.output_dim) # (output_dim, samples)
initial_state = K.permute_dimensions(initial_state, (1, 0)) # (samples, output_dim)
initial_states = [initial_state for _ in range(len(self.states))]
return initial_states