Remove Compat and src/compat.jl (#318)
iblislin authored and pluskid committed Nov 13, 2017
1 parent 9304e6e commit f8e1938
Showing 8 changed files with 8 additions and 34 deletions.
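
The change itself is mechanical: with Julia 0.6 as the minimum supported version (see REQUIRE below), the Compat dependency and the @compatmul helper from src/compat.jl are no longer needed, and the optimizers can use plain dot-broadcasting. compat.jl's definition is not shown in this diff, so reading @compatmul as an elementwise-multiplication shim is an inference from how it is replaced in the hunks below. A minimal, self-contained sketch of the broadcasting pattern on plain Julia arrays (not mx.NDArray):

    # Exponential moving average of the squared gradient, written with
    # Julia 0.6 dot-broadcasting -- the pattern the hunks below switch to.
    rho  = 0.95
    grad = [0.1, -0.2, 0.3]
    acc  = zeros(3)

    acc .*= rho                        # decay the accumulator in place
    acc .+= (1 - rho) .* grad .* grad  # accumulate the squared gradient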
1 change: 0 additions & 1 deletion REQUIRE
@@ -1,5 +1,4 @@
 julia 0.6
-Compat 0.25.2
 Formatting
 BinDeps
 JSON
2 changes: 0 additions & 2 deletions src/MXNet.jl
@@ -24,8 +24,6 @@ import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm,

 include("base.jl")

-include("compat.jl")
-
 include("context.jl")
 include("util.jl")

23 changes: 0 additions & 23 deletions src/compat.jl

This file was deleted.

6 changes: 3 additions & 3 deletions src/optimizers/adadelta.jl
@@ -78,14 +78,14 @@ function update(self :: AdaDelta, index :: Int, weight :: NDArray,

 # Update state.acc as in RMSProp
 @inplace state.acc .*= self.opts.rho
-@inplace state.acc .+= (1 - self.opts.rho) * @compatmul(grad, grad)
+@inplace state.acc .+= (1 - self.opts.rho) * grad .* grad

 # Compute update using the "old" state.delta_acc
-update = @compatmul(grad, sqrt(state.delta_acc + self.opts.epsilon)) ./
+update = grad .* sqrt(state.delta_acc + self.opts.epsilon) ./
          (sqrt(state.acc + self.opts.epsilon))
 @inplace weight .+= -lr * update

 # update state.delta_acc using update
 @inplace state.delta_acc .*= self.opts.rho
-@inplace state.delta_acc .+= (1 - self.opts.rho) * @compatmul(update, update)
+@inplace state.delta_acc .+= (1 - self.opts.rho) * update .* update
 end
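
For reference, the hunk above is the standard AdaDelta rule. The same sequence as a self-contained sketch on plain arrays (hyperparameter and state names mirror the hunk, the values are placeholders, and sqrt needs a dot on plain arrays where NDArray overloads it):

    # AdaDelta step on plain arrays, mirroring the updated hunk above.
    rho, epsilon, lr = 0.95, 1e-6, 1.0
    grad      = [0.1, -0.2, 0.3]
    weight    = [1.0,  2.0, 3.0]
    acc       = zeros(3)   # running average of squared gradients
    delta_acc = zeros(3)   # running average of squared updates

    # Accumulate the squared gradient, as in RMSProp.
    acc .= rho .* acc .+ (1 - rho) .* grad .* grad

    # Compute the update from the "old" delta_acc, then apply it.
    update = grad .* sqrt.(delta_acc .+ epsilon) ./ sqrt.(acc .+ epsilon)
    weight .-= lr .* update

    # Accumulate the squared update.
    delta_acc .= rho .* delta_acc .+ (1 - rho) .* update .* update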
2 changes: 1 addition & 1 deletion src/optimizers/adagrad.jl
@@ -61,6 +61,6 @@ function update(self :: AdaGrad, index :: Int, weight :: NDArray,
 lr = get_learning_rate(self.opts.lr_scheduler, self.state)
 grad = normalized_gradient(self.opts, self.state, weight, grad)

-@inplace state .+= @compatmul(grad, grad)
+@inplace state .+= grad .* grad
 @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon))
 end
4 changes: 2 additions & 2 deletions src/optimizers/adam.jl
@@ -60,8 +60,8 @@ function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray,
 lr = state.current_lr
 grad = normalized_gradient(self.opts, self.state, weight, grad)

-state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) * grad
-state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * @compatmul(grad, grad)
+state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) .* grad
+state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) .* grad .* grad

 at = sqrt(1.0 - state.beta2Power)/(1.0 - state.beta1Power)

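Only the moment updates are visible in the ADAM hunk; the rest of the step is collapsed in this view. A sketch of just those lines on plain arrays, with the bias-correction factor that follows them. The beta-power values here are placeholders for the running beta^t terms the optimizer state presumably tracks across steps:

    # ADAM first and second moment updates, as in the hunk above.
    beta1, beta2 = 0.9, 0.999
    grad = [0.1, -0.2, 0.3]
    mt   = zeros(3)   # first moment: moving average of the gradient
    vt   = zeros(3)   # second moment: moving average of the squared gradient
    beta1Power, beta2Power = beta1, beta2   # placeholder for beta^t at step t

    mt = beta1 .* mt .+ (1 - beta1) .* grad
    vt = beta2 .* vt .+ (1 - beta2) .* grad .* grad

    # Bias-correction factor used right after the hunk.
    at = sqrt(1.0 - beta2Power) / (1.0 - beta1Power)
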
2 changes: 1 addition & 1 deletion src/optimizers/nadam.jl
@@ -91,7 +91,7 @@ function update(self :: Nadam, index :: Int, weight :: NDArray,
 mt = state.mt / (1.0 - momentum_next)

 @inplace state.nt .*= self.opts.beta2
-@inplace state.nt .+= (1.0 - self.opts.beta2) * @compatmul(grad, grad)
+@inplace state.nt .+= (1.0 - self.opts.beta2) .* grad .* grad
 nt = state.nt / (1.0 - state.beta2Power)
 state.beta2Power *= self.opts.beta2

2 changes: 1 addition & 1 deletion src/optimizers/rmsprop.jl
@@ -65,7 +65,7 @@ function update(self :: RMSProp, index :: Int, weight :: NDArray,
 grad = normalized_gradient(self.opts, self.state, weight, grad)

 @inplace state .*= self.opts.rho
-@inplace state .+= (1 - self.opts.rho) * @compatmul(grad, grad)
+@inplace state .+= (1 - self.opts.rho) * grad .* grad

 @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon))
 end
