Change _optimizer to _optimizer_func
EthanRosenthal committed Jul 30, 2017
1 parent de2cd82 commit 3e57a95
Showing 4 changed files with 17 additions and 14 deletions.
spotlight/factorization/explicit.py (6 additions, 5 deletions)

@@ -47,7 +47,7 @@ class ExplicitFactorizationModel(object):
         L2 loss penalty.
     learning_rate: float, optional
         Initial learning rate.
-    optimizer: function, optional
+    optimizer_func: function, optional
         Function that takes in module parameters as the first argument and
         returns an instance of a Pytorch optimizer. Overrides l2 and learning
         rate if supplied. If no optimizer supplied, then use ADAM by default.
@@ -66,7 +66,7 @@ def __init__(self,
                 batch_size=256,
                 l2=0.0,
                 learning_rate=1e-2,
-                optimizer=None,
+                optimizer_func=None,
                 use_cuda=False,
                 sparse=False,
                 random_state=None):
@@ -82,12 +82,13 @@ def __init__(self,
        self._l2 = l2
        self._use_cuda = use_cuda
        self._sparse = sparse
-       self._optimizer = optimizer
+       self._optimizer_func = optimizer_func
        self._random_state = random_state or np.random.RandomState()

        self._num_users = None
        self._num_items = None
        self._net = None
+       self._optimizer = None

        set_seed(self._random_state.randint(-10**8, 10**8),
                 cuda=self._use_cuda)
@@ -122,14 +123,14 @@ def fit(self, interactions, verbose=False):
            self._use_cuda
        )

-       if self._optimizer is None:
+       if self._optimizer_func is None:
            self._optimizer = optim.Adam(
                self._net.parameters(),
                weight_decay=self._l2,
                lr=self._learning_rate
            )
        else:
-           self._optimizer = self._optimizer(self._net.parameters())
+           self._optimizer = self._optimizer_func(self._net.parameters())

        if self._loss == 'regression':
            loss_fnc = regression_loss
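
The rename does not change the calling contract described in the docstring: optimizer_func takes the module parameters as its first argument and returns a Pytorch optimizer, overriding l2 and learning_rate. A minimal usage sketch for ExplicitFactorizationModel follows; the SGD hyperparameters and the interactions variable are illustrative, not part of this commit.

    import torch.optim as optim

    from spotlight.factorization.explicit import ExplicitFactorizationModel


    def sgd_optimizer(model_params):
        # Receives the network's parameters and returns a Pytorch optimizer.
        # Whatever it configures overrides the model's l2 and learning_rate.
        return optim.SGD(model_params, lr=0.05, momentum=0.9, weight_decay=1e-6)


    model = ExplicitFactorizationModel(loss='regression',
                                       optimizer_func=sgd_optimizer)
    # model.fit(interactions)  # interactions: a Spotlight Interactions dataset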
spotlight/factorization/implicit.py (5 additions, 4 deletions)

@@ -55,7 +55,7 @@ class ImplicitFactorizationModel(object):
         L2 loss penalty.
     learning_rate: float, optional
         Initial learning rate.
-    optimizer: function, optional
+    optimizer_func: function, optional
         Function that takes in module parameters as the first argument and
         returns an instance of a Pytorch optimizer. Overrides l2 and learning
         rate if supplied. If no optimizer supplied, then use ADAM by default.
@@ -74,7 +74,7 @@ def __init__(self,
                 batch_size=256,
                 l2=0.0,
                 learning_rate=1e-2,
-                optimizer=None,
+                optimizer_func=None,
                 use_cuda=False,
                 sparse=False,
                 random_state=None):
@@ -92,12 +92,13 @@ def __init__(self,
        self._l2 = l2
        self._use_cuda = use_cuda
        self._sparse = sparse
-       self._optimizer = optimizer
+       self._optimizer_func = optimizer_func
        self._random_state = random_state or np.random.RandomState()

        self._num_users = None
        self._num_items = None
        self._net = None
+       self._optimizer = None

        set_seed(self._random_state.randint(-10**8, 10**8),
                 cuda=self._use_cuda)
@@ -139,7 +140,7 @@ def fit(self, interactions, verbose=False):
                lr=self._learning_rate
            )
        else:
-           self._optimizer = self._optimizer(self._net.parameters())
+           self._optimizer = self._optimizer_func(self._net.parameters())

        if self._loss == 'pointwise':
            loss_fnc = pointwise_loss
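
As the fit() hunks show, leaving optimizer_func at None makes the model build Adam from its own l2 and learning_rate, while supplying a factory takes over that construction entirely. A sketch of the two roughly equivalent configurations for ImplicitFactorizationModel, assuming the implicit model falls back to Adam the same way as the explicit model above (its hunk shows the same else branch); hyperparameter values are illustrative.

    import torch.optim as optim

    from spotlight.factorization.implicit import ImplicitFactorizationModel

    # Default path: fit() constructs optim.Adam from l2 and learning_rate.
    default_model = ImplicitFactorizationModel(loss='bpr',
                                               l2=1e-6,
                                               learning_rate=1e-2)

    # Explicit path: the factory receives the module parameters and returns
    # the optimizer itself, overriding l2 and learning_rate.
    def adam_factory(model_params):
        return optim.Adam(model_params, weight_decay=1e-6, lr=1e-2)

    custom_model = ImplicitFactorizationModel(loss='bpr',
                                              optimizer_func=adam_factory)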
spotlight/sequence/implicit.py (5 additions, 4 deletions)

@@ -47,7 +47,7 @@ class ImplicitSequenceModel(object):
         L2 loss penalty.
     learning_rate: float, optional
         Initial learning rate.
-    optimizer: function, optional
+    optimizer_func: function, optional
         Function that takes in module parameters as the first argument and
         returns an instance of a Pytorch optimizer. Overrides l2 and learning
         rate if supplied. If no optimizer supplied, then use ADAM by default.
@@ -87,7 +87,7 @@ def __init__(self,
                 batch_size=256,
                 l2=0.0,
                 learning_rate=1e-2,
-                optimizer=None,
+                optimizer_func=None,
                 use_cuda=False,
                 sparse=False,
                 random_state=None):
@@ -111,11 +111,12 @@ def __init__(self,
        self._l2 = l2
        self._use_cuda = use_cuda
        self._sparse = sparse
-       self._optimizer = optimizer
+       self._optimizer_func = optimizer_func
        self._random_state = random_state or np.random.RandomState()

        self._num_items = None
        self._net = None
+       self._optimizer = None

        set_seed(self._random_state.randint(-10**8, 10**8),
                 cuda=self._use_cuda)
@@ -163,7 +164,7 @@ def fit(self, interactions, verbose=False):
                lr=self._learning_rate
            )
        else:
-           self._optimizer = self._optimizer(self._net.parameters())
+           self._optimizer = self._optimizer_func(self._net.parameters())

        if self._loss == 'pointwise':
            loss_fnc = pointwise_loss
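
The sequence model accepts the same renamed argument. The test hunk below shows a factory that takes extra parameters besides model_params (def adagrad_optimizer(model_params, ...); its body is outside the hunk), and one common way to bind such extras is functools.partial. The helper name and values here are hypothetical, used only to illustrate that pattern.

    from functools import partial

    import torch.optim as optim

    from spotlight.sequence.implicit import ImplicitSequenceModel


    def adagrad_factory(model_params, lr, weight_decay):
        # Extra hyperparameters are bound with functools.partial below, so the
        # model still passes only the module parameters as the first argument.
        return optim.Adagrad(model_params, lr=lr, weight_decay=weight_decay)


    model = ImplicitSequenceModel(loss='pointwise',
                                  optimizer_func=partial(adagrad_factory,
                                                         lr=1e-2,
                                                         weight_decay=1e-6))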
tests/factorization/test_implicit.py (1 addition, 1 deletion)

@@ -66,7 +66,7 @@ def adagrad_optimizer(model_params,
    model = ImplicitFactorizationModel(loss='bpr',
                                       n_iter=10,
                                       batch_size=1024,
-                                      optimizer=adagrad_optimizer)
+                                      optimizer_func=adagrad_optimizer)
    model.fit(train)

    mrr = mrr_score(model, test, train=train).mean()
