VolumetricDilatedConvolution.lua
local THNN = require 'nn.THNN'
local VolumetricDilatedConvolution, parent = torch.class('nn.VolumetricDilatedConvolution', 'nn.VolumetricConvolution')

function VolumetricDilatedConvolution:__init(nInputPlane, nOutputPlane, kT, kW, kH, dT, dW, dH, padT, padW, padH, dilationT, dilationW, dilationH)
   parent.__init(self, nInputPlane, nOutputPlane, kT, kW, kH, dT, dW, dH, padT, padW, padH)

   -- dilation defaults to 1 in each dimension, i.e. an ordinary convolution
   self.dilationT = dilationT or 1
   self.dilationW = dilationW or 1
   self.dilationH = dilationH or 1
end

function VolumetricDilatedConvolution:updateOutput(input)
   -- finput/fgradInput are scratch buffers used by the THNN backend
   self.finput = self.finput or self.weight.new()
   self.fgradInput = self.fgradInput or self.weight.new()
   input.THNN.VolumetricDilatedConvolution_updateOutput(
      input:cdata(),
      self.output:cdata(),
      self.weight:cdata(),
      THNN.optionalTensor(self.bias),
      self.finput:cdata(),
      self.fgradInput:cdata(),
      self.kT, self.kW, self.kH,
      self.dT, self.dW, self.dH,
      self.padT, self.padW, self.padH,
      self.dilationT, self.dilationW, self.dilationH
   )
   return self.output
end

function VolumetricDilatedConvolution:updateGradInput(input, gradOutput)
   if self.gradInput then
      self.fgradInput = self.fgradInput or self.weight.new()
      input.THNN.VolumetricDilatedConvolution_updateGradInput(
         input:cdata(),
         gradOutput:cdata(),
         self.gradInput:cdata(),
         self.weight:cdata(),
         self.finput:cdata(),
         self.kT, self.kW, self.kH,
         self.dT, self.dW, self.dH,
         self.padT, self.padW, self.padH,
         self.dilationT, self.dilationW, self.dilationH
      )
      return self.gradInput
   end
end

function VolumetricDilatedConvolution:accGradParameters(input, gradOutput, scale)
   scale = scale or 1
   self.fgradInput = self.fgradInput or self.weight.new()
   input.THNN.VolumetricDilatedConvolution_accGradParameters(
      input:cdata(),
      gradOutput:cdata(),
      self.gradWeight:cdata(),
      THNN.optionalTensor(self.gradBias),
      self.finput:cdata(),
      self.fgradInput:cdata(),
      self.kT, self.kW, self.kH,
      self.dT, self.dW, self.dH,
      self.padT, self.padW, self.padH,
      self.dilationT, self.dilationW, self.dilationH,
      scale
   )
end

function VolumetricDilatedConvolution:__tostring__()
   local s = string.format('%s(%d -> %d, %dx%dx%d', torch.type(self),
      self.nInputPlane, self.nOutputPlane, self.kT, self.kW, self.kH)
   if self.dT ~= 1 or self.dW ~= 1 or self.dH ~= 1
         or self.padT ~= 0 or self.padW ~= 0 or self.padH ~= 0 then
      s = s .. string.format(', %d,%d,%d', self.dT, self.dW, self.dH)
   end
   if (self.padT or self.padW or self.padH)
         and (self.padT ~= 0 or self.padW ~= 0 or self.padH ~= 0) then
      s = s .. ', ' .. self.padT .. ',' .. self.padW .. ',' .. self.padH
   end
   s = s .. ', ' .. self.dilationT .. ','
      .. self.dilationW .. ',' .. self.dilationH
   if self.bias then
      return s .. ')'
   else
      return s .. ') without bias'
   end
end
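
A minimal usage sketch follows (not part of the module source). It assumes only the constructor argument order shown in __init above and the usual Torch 4D input layout (nInputPlane x T x H x W); the concrete sizes are illustrative.

-- Usage sketch: build a dilated 3D convolution and run a forward pass.
local nn = require 'nn'

-- 3 input planes -> 8 output planes, 3x3x3 kernel, stride 1, no padding,
-- dilation 2 in every dimension.
local conv = nn.VolumetricDilatedConvolution(3, 8, 3, 3, 3, 1, 1, 1, 0, 0, 0, 2, 2, 2)

local input  = torch.randn(3, 16, 32, 32)    -- nInputPlane x T x H x W
local output = conv:forward(input)

-- With dilation d the effective kernel extent is d*(k-1)+1 = 5 here, so the
-- usual shape rule gives oT = floor((16 - 5)/1) + 1 = 12 and oH = oW = 28.
print(output:size())                         -- expected: 8 x 12 x 28 x 28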