Make Recurrent layers usable with conv layers
This commit makes the Recurrent and LSTM layers compatible with
convolutional layers.
anlthms committed Jul 10, 2016
1 parent 64eb838 commit add634c
Showing 1 changed file with 6 additions and 1 deletion.
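
For context: the change lets a recurrent layer sit directly on top of a conv
layer in a model definition. Below is a minimal sketch of such a stack,
assuming the Conv/Recurrent/Affine/Model APIs of neon at the time; the layer
sizes, initializer, and activations are illustrative assumptions, not taken
from this commit.

    from neon.initializers import Gaussian
    from neon.layers import Affine, Conv, Recurrent
    from neon.models import Model
    from neon.transforms import Rectlin, Softmax, Tanh

    init = Gaussian(scale=0.01)  # illustrative initializer choice
    layers = [
        # The conv layer reports a multi-dimensional output shape; before
        # this commit the recurrent layer could not consume it directly.
        Conv((3, 3, 16), init=init, activation=Rectlin()),
        # configure() now flattens in_shape to (nin, nsteps), so this works.
        Recurrent(64, init, activation=Tanh()),
        Affine(nout=10, init=init, activation=Softmax()),
    ]
    model = Model(layers=layers)
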
neon/layers/recurrent.py (6 additions, 1 deletion)
@@ -90,6 +90,10 @@ def __init__(self, output_size, init, init_inner=None, activation=None,
         self.reset_cells = reset_cells
         self.init_inner = init_inner
 
+    def __str__(self):
+        return "Recurrent Layer '%s': %d inputs, %d outputs, %d steps" % (
+            self.name, self.nin, self.nout, self.nsteps)
+
     def configure(self, in_obj):
         """
         Set shape based parameters of this layer given an input tuple, int
@@ -105,6 +109,7 @@ def configure(self, in_obj):
         super(Recurrent, self).configure(in_obj)
 
         (self.nin, self.nsteps) = interpret_in_shape(self.in_shape)
+        self.in_shape = (self.nin, self.nsteps)
 
         self.out_shape = (self.nout, self.nsteps)
         self.gate_shape = (self.nout * self.ngates, self.nsteps)
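
The write-back in the hunk above appears to be the compatibility fix proper:
interpret_in_shape flattens whatever shape the previous layer reports into a
(nin, nsteps) pair, but self.in_shape used to keep the raw, possibly
multi-dimensional conv-style shape, which later trips up
get_steps(inputs, self.in_shape) in init_buffers. Storing the flattened pair
back means every downstream consumer sees a flat 2-tuple. A toy illustration
of that invariant (all shape values below are made up):

    # Illustrative values only: a conv layer might report e.g. (C, H, W)
    # features per step, which interpret_in_shape flattens to nin = C*H*W.
    nin, nsteps = 16 * 4 * 4, 10    # hypothetical flattened result
    in_shape = (nin, nsteps)        # what configure() now stores back
    assert in_shape == (256, 10)    # flat 2-tuple, safe for get_steps()
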
@@ -163,7 +168,7 @@ def init_buffers(self, inputs):
         if self.x is not None:
             for buf in self.bufs_to_reset:
                 buf[:] = 0
-        self.x = inputs
+        self.x = inputs.reshape(self.nin, self.nsteps * self.be.bsz)
         self.xs = get_steps(inputs, self.in_shape)
 
     def init_params(self, shape):
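
The reshape in the last hunk pins down the layout the recurrent code expects:
a single 2-D tensor with nin features on the first axis and nsteps * bsz
columns, which get_steps then slices into per-step views. A NumPy sketch of
that bookkeeping (the sizes are made up, and get_steps's exact slicing order
is neon's, not reproduced here):

    import numpy as np

    nin, nsteps, bsz = 8, 4, 2      # illustrative sizes only

    # A conv layer may hand its output over flattened differently, e.g.
    # with the step dimension folded into the rows.
    conv_out = np.arange(nin * nsteps * bsz).reshape(nin * nsteps, bsz)

    # The commit's normalization: view the same buffer as
    # (nin, nsteps * bsz), the layout the recurrent layer works on.
    x = conv_out.reshape(nin, nsteps * bsz)
    assert x.shape == (nin, nsteps * bsz)
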
