Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Remove dependency on 'kex'. Moved the relevant code from kex into 'nn' or 'unsup'.
- Loading branch information
Showing
8 changed files
with
159 additions
and
16 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,44 @@ | ||
local Diag, parent = torch.class('nn.Diag', 'nn.Module')

-- nn.Diag: scales each slice along the first dimension of the input by a
-- learnable per-slice weight.  For a 1-D input this is element-wise
-- multiplication by a length-nFeature vector, i.e. multiplication by a
-- diagonal matrix.

function Diag:__init(nFeature)
   parent.__init(self)
   -- One learnable scale (and its gradient accumulator) per slice.
   self.weight = torch.Tensor(nFeature)
   self.gradWeight = torch.Tensor(nFeature)
   self:reset()
end

function Diag:reset(stdv)
   -- Initialize all diagonal entries to 1 (the identity transform).
   -- stdv is accepted for signature compatibility with nn.Module:reset
   -- but is intentionally unused.
   self.weight:fill(1)
end

function Diag:updateOutput(input)
   self.output:resizeAs(input):copy(input)
   if input:dim() > 1 then
      -- Multi-dimensional input: scale slice i (along dim 1) by weight[i].
      for i = 1, input:size(1) do
         self.output[{{i}}]:mul(self.weight[i])
      end
   else
      -- 1-D input: plain element-wise product with the diagonal.
      self.output:cmul(self.weight)
   end
   return self.output
end

function Diag:updateGradInput(input, gradOutput)
   -- The Jacobian is the same diagonal scaling, so the input gradient is
   -- gradOutput scaled slice-wise by the weights.
   self.gradInput:resizeAs(gradOutput):copy(gradOutput)
   if input:dim() > 1 then
      for i = 1, input:size(1) do
         self.gradInput[{{i}}]:mul(self.weight[i])
      end
   else
      self.gradInput:cmul(self.weight)
   end
   return self.gradInput
end

function Diag:accGradParameters(input, gradOutput, scale)
   -- nn convention: scale defaults to 1 when the caller omits it.
   -- (The original multiplied by a possibly-nil scale, which errors when
   -- accGradParameters is called without the optional third argument.)
   scale = scale or 1
   -- dL/dweight[i] = scale * <gradOutput slice i, input slice i>
   for i = 1, input:size(1) do
      self.gradWeight[i] = self.gradWeight[i]
         + scale * gradOutput[{{i}}]:dot(input[{{i}}])
   end
end
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
#ifndef TH_GENERIC_FILE | ||
#define TH_GENERIC_FILE "generic/util.c" | ||
#else | ||
|
||
static int unsup_(shrinkage)(lua_State *L) | ||
{ | ||
real lambda = luaL_checknumber(L,2); | ||
THTensor *tensor = luaT_checkudata(L,1, torch_Tensor); | ||
luaL_argcheck(L, lambda >=0, 2, "Lambda should be non-negative"); | ||
|
||
if (lambda == 0) return 1; | ||
|
||
TH_TENSOR_APPLY(real, tensor, | ||
if (*tensor_data > lambda) | ||
{ | ||
*tensor_data -= lambda; | ||
} | ||
else if (*tensor_data < -lambda) | ||
{ | ||
*tensor_data += lambda; | ||
} | ||
else | ||
{ | ||
*tensor_data = 0; | ||
}); | ||
return 1; | ||
} | ||
|
||
static const struct luaL_Reg unsup_(util__) [] = { | ||
{"shrinkage", unsup_(shrinkage)}, | ||
{NULL, NULL} | ||
}; | ||
|
||
static void unsup_(util_init)(lua_State *L) | ||
{ | ||
luaT_pushmetatable(L, torch_Tensor); | ||
luaL_register(L, NULL, unsup_(util__)); | ||
lua_pop(L,1); | ||
} | ||
|
||
#endif |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
#include "TH.h"
#include "luaT.h"

/* Token-pasting helpers: expand NAME once per floating-point type,
 * e.g. unsup_(shrinkage) -> unsup_Floatshrinkage / unsup_Doubleshrinkage. */
#define torch_(NAME) TH_CONCAT_3(torch_, Real, NAME)
#define torch_Tensor TH_CONCAT_STRING_3(torch.,Real,Tensor)
#define unsup_(NAME) TH_CONCAT_3(unsup_, Real, NAME)
#define torch_string_(NAME) TH_CONCAT_STRING_3(torch., Real, NAME)

/* Instantiate generic/util.c for Float and Double. */
#include "generic/util.c"
#include "THGenerateFloatTypes.h"

/* Module entry point: creates the global 'unsup' table and registers the
 * per-type tensor methods. */
DLL_EXPORT int luaopen_libunsup(lua_State *L)
{
  lua_newtable(L);
  lua_pushvalue(L, -1);
  /* NOTE(review): LUA_GLOBALSINDEX exists only in Lua 5.1/LuaJIT; this
   * would need lua_setglobal/_ENV handling to build against 5.2+. */
  lua_setfield(L, LUA_GLOBALSINDEX, "unsup");

  /* Register shrinkage on the Float and Double tensor metatables. */
  unsup_Floatutil_init(L);
  unsup_Doubleutil_init(L);

  /* Return the 'unsup' table left on the stack. */
  return 1;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters