Commit

deprecated dp and Recurrent and FastLSTM
Nicholas Leonard committed Apr 20, 2017
1 parent 31b0b89 commit a2f4eb2
Showing 11 changed files with 398 additions and 380 deletions.
examples/noise-contrastive-estimate.lua (1 change: 0 additions & 1 deletion)

@@ -221,7 +221,6 @@ while opt.maxepoch <= 0 or epoch <= opt.maxepoch do
    sumErr = sumErr + err
    -- backward
    local gradOutputs = criterion:backward(outputs, targets)
-   local a = torch.Timer()
    lm:zeroGradParameters()
    lm:backward(inputs, gradOutputs)

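The deleted line allocated a fresh torch.Timer on every training iteration and never read it, apparently leftover profiling code. To actually time the backward pass, a minimal sketch (hypothetical, not part of this commit) would reuse a single timer across iterations:

local timer = torch.Timer() -- created once, outside the training loop

timer:reset()
lm:zeroGradParameters()
lm:backward(inputs, gradOutputs)
print(string.format('backward pass took %.3f s', timer:time().real))
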
examples/recurrent-language-model.lua (11 changes: 5 additions & 6 deletions)

@@ -94,20 +94,19 @@ for i,hiddensize in ipairs(opt.hiddensize) do
    if opt.gru then -- Gated Recurrent Units
       rnn = nn.GRU(inputsize, hiddensize, nil, opt.dropout/2)
    elseif opt.lstm then -- Long Short Term Memory units
-      require 'nngraph'
-      nn.FastLSTM.usenngraph = true -- faster
-      nn.FastLSTM.bn = opt.bn
-      rnn = nn.FastLSTM(inputsize, hiddensize)
+      rnn = nn.RecLSTM(inputsize, hiddensize)
    elseif opt.mfru then -- Multi Function Recurrent Unit
       rnn = nn.MuFuRu(inputsize, hiddensize)
-   else -- simple recurrent neural network
+   elseif i == 1 then -- simple recurrent neural network
       local rm = nn.Sequential() -- input is {x[t], h[t-1]}
          :add(nn.ParallelTable()
-            :add(i==1 and nn.Identity() or nn.Linear(inputsize, hiddensize)) -- input layer
+            :add(nn.Identity()) -- input layer
             :add(nn.Linear(hiddensize, hiddensize))) -- recurrent layer
          :add(nn.CAddTable()) -- merge
          :add(nn.Sigmoid()) -- transfer
       rnn = nn.Recurrence(rm, hiddensize, 1)
+   else
+      rnn = nn.LinearRNN(hiddensize, hiddensize)
    end

    stepmodule:add(rnn)
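
For scripts that still construct nn.FastLSTM directly, the migration shown above is a one-line change. A minimal sketch (sizes illustrative; wrapping in nn.Sequencer and feeding a time-major tensor follows the library's usual convention and is an assumption, not part of this diff):

require 'rnn'

local inputsize, hiddensize = 128, 256
local seqlen, batchsize = 5, 32

-- before (deprecated): local rnn = nn.FastLSTM(inputsize, hiddensize)
local rnn = nn.RecLSTM(inputsize, hiddensize)

local lstm = nn.Sequencer(rnn) -- feed whole sequences at once
local input = torch.randn(seqlen, batchsize, inputsize) -- seqlen x batch x inputsize
local output = lstm:forward(input) -- seqlen x batch x hiddensize

Unlike nn.FastLSTM, no nngraph dependency or usenngraph flag is involved; nn.RecLSTM takes the same (inputsize, hiddensize) pair, as the hunk above shows.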
examples/recurrent-time-series.lua (38 changes: 15 additions & 23 deletions)

@@ -1,11 +1,11 @@
 -- Multi-variate time-series example

 require 'rnn'

 cmd = torch.CmdLine()
 cmd:text()
 cmd:text('Train a multivariate time-series model using RNN')
-cmd:option('--rho', 5, 'maximum number of time steps for back-propagate through time (BPTT)')
+cmd:option('--seqlen', 5, 'maximum number of time steps for back-propagate through time (BPTT)')
 cmd:option('--multiSize', 6, 'number of random variables as input and output')
 cmd:option('--hiddenSize', 10, 'number of hidden units used at output of the recurrent layer')
 cmd:option('--dataSize', 100, 'total number of time-steps in dataset')
@@ -53,20 +53,12 @@ print('Sequence:'); print(sequence)

 offsets = torch.LongTensor(opt.batchSize):random(1,opt.dataSize)

--- RNN
-r = nn.Recurrent(
-   opt.hiddenSize, -- size of output
-   nn.Linear(opt.multiSize, opt.hiddenSize), -- input layer
-   nn.Linear(opt.hiddenSize, opt.hiddenSize), -- recurrent layer
-   nn.Sigmoid(), -- transfer function
-   opt.rho
-)
-
+-- Simple RNN
 rnn = nn.Sequential()
-   :add(r)
+   :add(nn.LinearRNN(opt.multiSize, opt.hiddenSize))
    :add(nn.Linear(opt.hiddenSize, opt.multiSize))

 criterion = nn.MSECriterion()

 -- use Sequencer for better data handling
 rnn = nn.Sequencer(rnn)
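
The eight-line nn.Recurrent constructor collapses into a single nn.LinearRNN(inputsize, outputsize). Judging from the parallel change in recurrent-language-model.lua above, LinearRNN bundles the input layer, recurrent layer, additive merge, and sigmoid transfer that nn.Recurrent took as separate arguments, i.e. roughly h[t] = sigmoid(Wx * x[t] + Wh * h[t-1] + b). An approximately equivalent module built with nn.Recurrence, following the pattern in that file (an inference from this commit, not the library's actual implementation):

local inputsize, hiddensize = opt.multiSize, opt.hiddenSize

local rm = nn.Sequential() -- input is {x[t], h[t-1]}
   :add(nn.ParallelTable()
      :add(nn.Linear(inputsize, hiddensize)) -- input layer
      :add(nn.Linear(hiddensize, hiddensize))) -- recurrent layer
   :add(nn.CAddTable()) -- merge
   :add(nn.Sigmoid()) -- transfer
local rnn = nn.Recurrence(rm, hiddensize, 1)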
@@ -79,12 +71,12 @@ print(rnn)
 minErr = opt.multiSize -- report min error
 minK = 0
 avgErrs = torch.Tensor(opt.nIterations):fill(0)
 for k = 1, opt.nIterations do

-   -- 1. create a sequence of rho time-steps
+   -- 1. create a sequence of seqlen time-steps

    local inputs, targets = {}, {}
-   for step = 1, opt.rho do
+   for step = 1, opt.seqlen do
       -- batch of inputs
       inputs[step] = inputs[step] or sequence.new()
       inputs[step]:index(sequence, 1, offsets)
@@ -99,10 +91,10 @@ for k = 1, opt.nIterations do

    local outputs = rnn:forward(inputs)
    local err = criterion:forward(outputs, targets)

-   -- report errors
-   print('Iter: ' .. k .. ' Err: ' .. err)
+   print('Iter: ' .. k .. ' Err: ' .. err)

    if opt.plot then
       logger:add{['Err'] = err}
       logger:style{['Err'] = '-'}
@@ -116,14 +108,14 @@ end
    end

    -- 3. backward sequence through rnn (i.e. backprop through time)

    rnn:zeroGradParameters()

    local gradOutputs = criterion:backward(outputs, targets)
    local gradInputs = rnn:backward(inputs, gradOutputs)

    -- 4. updates parameters

    rnn:updateParameters(opt.learningRate)
 end

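Condensing the loop above, one full training iteration is the standard Sequencer pattern: forward the table of opt.seqlen input batches, measure the error, backpropagate through time, and take a plain SGD step (updateParameters subtracts learningRate times the accumulated gradients):

local outputs = rnn:forward(inputs) -- 2. forward sequence
local err = criterion:forward(outputs, targets)

rnn:zeroGradParameters() -- 3. backprop through time
local gradOutputs = criterion:backward(outputs, targets)
rnn:backward(inputs, gradOutputs)

rnn:updateParameters(opt.learningRate) -- 4. update parameters
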
(The diffs for the remaining 8 changed files are not shown.)
