Update GRU.lua
Added oneHot encoding and softmax
guillitte committed Jul 31, 2015
1 parent ef0373f commit 920ef81
Showing 1 changed file with 17 additions and 6 deletions.
model/GRU.lua: 17 additions & 6 deletions
@@ -5,8 +5,8 @@ local GRU = {}
 Creates one timestep of one GRU
 Paper reference: http://arxiv.org/pdf/1412.3555v1.pdf
 ]]--
-function GRU.gru(input_size, rnn_size, n)
-
+function GRU.gru(input_size, rnn_size, n, dropout)
+  dropout = dropout or 0
   -- there are n+1 inputs (hiddens on each layer and x)
   local inputs = {}
   table.insert(inputs, nn.Identity()()) -- x
@@ -25,9 +25,15 @@ function GRU.gru(input_size, rnn_size, n)
   for L = 1,n do

     local prev_h = inputs[L+1]
-    if L == 1 then x = inputs[1] else x = outputs[L-1] end
-    if L == 1 then input_size_L = input_size else input_size_L = rnn_size end
-
+    -- the input to this layer
+    if L == 1 then
+      x = OneHot(input_size)(inputs[1])
+      input_size_L = input_size
+    else
+      x = outputs[(L-1)]
+      if dropout > 0 then x = nn.Dropout(dropout)(x) end -- apply dropout, if any
+      input_size_L = rnn_size
+    end
     -- GRU tick
     -- forward the update and reset gates
     local update_gate = nn.Sigmoid()(new_input_sum(input_size_L, x, prev_h))
@@ -44,9 +50,14 @@ function GRU.gru(input_size, rnn_size, n)

     table.insert(outputs, next_h)
   end
-
+  -- set up the decoder
+  local top_h = outputs[#outputs]
+  if dropout > 0 then top_h = nn.Dropout(dropout)(top_h) end
+  local proj = nn.Linear(rnn_size, input_size)(top_h)
+  local logsoft = nn.LogSoftMax()(proj)
+  table.insert(outputs, logsoft)

   return nn.gModule(inputs, outputs)
 end

 return GRU
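Note: OneHot is not defined in GRU.lua; the diff assumes a OneHot class is already in scope (char-rnn keeps one in util/OneHot.lua). As an illustration only, not the repository's implementation, a minimal such nn.Module could look like this:

-- Sketch of a one-hot layer: maps an N-vector of class indices
-- to an N x outputSize matrix with a single 1 per row.
-- (Illustrative; the repo's own OneHot file is the real source.)
local OneHot, parent = torch.class('OneHot', 'nn.Module')

function OneHot:__init(outputSize)
  parent.__init(self)
  self.outputSize = outputSize
end

function OneHot:updateOutput(input)
  -- input: 1D tensor of indices in [1, self.outputSize]
  self.output:resize(input:size(1), self.outputSize):zero()
  for i = 1, input:size(1) do
    self.output[i][input[i]] = 1
  end
  return self.output
end

function OneHot:updateGradInput(input, gradOutput)
  -- discrete indices carry no gradient
  self.gradInput:resize(input:size()):zero()
  return self.gradInput
end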

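A minimal usage sketch of the updated constructor (the require paths, vocabulary size, and batch below are illustrative assumptions, not part of this commit):

require 'nn'
require 'nngraph'
require 'util.OneHot'            -- assumed location of the OneHot layer
local GRU = require 'model.GRU'

local vocab_size, rnn_size, n_layers = 65, 128, 2   -- example sizes
local net = GRU.gru(vocab_size, rnn_size, n_layers, 0.5)

-- one timestep: a batch of 4 token indices plus one hidden state per layer
local x  = torch.Tensor{1, 5, 12, 64}
local h1 = torch.zeros(4, rnn_size)
local h2 = torch.zeros(4, rnn_size)
local out = net:forward{x, h1, h2}

-- out = {next_h1, next_h2, log_probs}; the new LogSoftMax head makes the
-- last entry a 4 x vocab_size matrix of log-probabilities
print(out[#out]:size())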