-- SoftPlus.lua (forked from torch/nn)

local SoftPlus, parent = torch.class('nn.SoftPlus', 'nn.Module')

function SoftPlus:__init(beta)
   parent.__init(self)
   self.beta = beta or 1   -- beta controls the sharpness of the transfer function
   self.threshold = 20     -- for beta*x > threshold, f(x) falls back to x to avoid overflow in exp()
end

function SoftPlus:updateOutput(input)
   -- f(x) = 1/beta * log(1 + exp(beta * x))
   input.nn.SoftPlus_updateOutput(self, input)
   return self.output
end

function SoftPlus:updateGradInput(input, gradOutput)
   -- d/dx[log(1+exp(k*x))/k] = exp(k*x) / (exp(k*x) + 1)
   -- SINCE
   -- y = (1/k)*log(1+exp(k*x)) --> x = (1/k)*log(exp(k*y)-1)
   -- THEREFORE:
   -- d/dx(f(x)) = (exp(k*y) - 1) / exp(k*y) = 1 - exp(-k*y)
   -- i.e. the gradient can be computed from the saved output y alone
   input.nn.SoftPlus_updateGradInput(self, input, gradOutput)
   return self.gradInput
end
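
-- A matching sketch of the backward pass, again illustrative and hypothetical
-- in name: the module delegates to the C kernel through
-- input.nn.SoftPlus_updateGradInput. Using the identity above,
-- f'(x) = 1 - exp(-beta*y), where y is the saved output.
local function softPlusBackwardSketch(output, gradOutput, beta)
   local dfdx = torch.exp(output * -beta):mul(-1):add(1) -- 1 - exp(-beta*y)
   return torch.cmul(gradOutput, dfdx)
end

-- Usage sketch (assumes the standard 'nn' package is loaded); a larger beta
-- makes SoftPlus a sharper approximation of ReLU:
-- local m = nn.SoftPlus(2)
-- local input = torch.randn(4)
-- local output = m:forward(input)                     -- 1/2 * log(1 + exp(2*x))
-- local gradInput = m:backward(input, torch.ones(4))  -- gradOutput * sigmoid(2*x)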