Merged
1 change: 0 additions & 1 deletion .travis.yml
```diff
@@ -3,7 +3,6 @@ os:
   - linux
   - osx
 julia:
-  - 0.3
   - 0.4
   - 0.5
   - nightly
```
2 changes: 1 addition & 1 deletion REQUIRE
```diff
@@ -1,2 +1,2 @@
-julia 0.3.7
+julia 0.4
 Compat 0.4.0
```
8 changes: 4 additions & 4 deletions src/differentiate.jl
```diff
@@ -79,9 +79,9 @@ end
 # d/dx (f * g * h) = (d/dx f) * g * h + f * (d/dx g) * h + ...
 function differentiate(::SymbolParameter{:*}, args, wrt)
     n = length(args)
-    res_args = Array(Any, n)
+    res_args = Vector{Any}(n)
     for i in 1:n
-        new_args = Array(Any, n)
+        new_args = Vector{Any}(n)
         for j in 1:n
             if j == i
                 new_args[j] = differentiate(args[j], wrt)
```
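This hunk sits in the n-ary product rule: the derivative of `f * g * h` is a sum with one term per factor, each term differentiating exactly one factor. A self-contained sketch of that rule, using the constructor spelling this PR adopts (`deriv` and `product_rule` are hypothetical names for illustration, not the package's API):

```julia
# Hypothetical one-factor derivative for symbols and numeric literals.
deriv(arg::Symbol, wrt::Symbol) = arg == wrt ? 1 : 0
deriv(arg::Number, wrt::Symbol) = 0

# d/dx (f1 * f2 * ... * fn) = sum over i of (d/dx fi) * prod of fj for j != i
function product_rule(args::Vector{Any}, wrt::Symbol)
    n = length(args)
    res_args = Vector{Any}(n)         # Vector{Any}(undef, n) on Julia >= 0.7
    for i in 1:n
        new_args = Vector{Any}(n)
        for j in 1:n
            # differentiate the i-th factor, copy the others through unchanged
            new_args[j] = (j == i) ? deriv(args[j], wrt) : args[j]
        end
        res_args[i] = Expr(:call, :*, new_args...)
    end
    return Expr(:call, :+, res_args...)
end

# One summand per factor: (1 * y * x) + (x * 0 * x) + (x * y * 1)
product_rule(Any[:x, :y, :x], :x)
```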
```diff
@@ -200,7 +200,7 @@ export symbolic_derivatives_1arg
 
 # deprecated: for backward compatibility with packages that used
 # this unexported interface.
-derivative_rules = Array(@Compat.compat(Tuple{Symbol,Expr}),0)
+derivative_rules = Vector{Compat.@compat(Tuple{Symbol,Expr})}(0)
 for (s,ex) in symbolic_derivative_1arg_list
     push!(derivative_rules, (s, :(xp*$ex)))
 end
```
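Besides the constructor change, this hunk moves from `@Compat.compat` to `Compat.@compat`, the spelling Julia 0.4 standardized on for calling a macro qualified by its module. A small sketch of the same pattern (the names here are illustrative, not the package's API); on Julia 0.3, `Compat.@compat` rewrote `Tuple{Symbol,Expr}` into the old tuple-type syntax, while on 0.4+ it passes the type through unchanged:

```julia
using Compat

const RuleType = Compat.@compat(Tuple{Symbol,Expr})

rules = Vector{RuleType}(0)        # empty typed vector, 0.4/0.5 spelling
push!(rules, (:sin, :(cos(x))))    # a (function name, derivative body) rule
```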
```diff
@@ -267,7 +267,7 @@ end
 
 function differentiate(ex::Expr, targets::Vector{Symbol})
     n = length(targets)
-    exprs = Array(Any, n)
+    exprs = Vector{Any}(n)
     for i in 1:n
        exprs[i] = differentiate(ex, targets[i])
     end
```
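The method in this last hunk simply maps `differentiate` over each target symbol, returning one partial-derivative expression per target. Assuming this is Calculus.jl's exported `differentiate`, usage looks roughly like:

```julia
using Calculus

# One derivative expression per target symbol; the exact printed form
# depends on the package's simplification rules.
grads = differentiate(:(x^2 + x*y), [:x, :y])
grads[1]   # expression for the partial derivative w.r.t. x
grads[2]   # expression for the partial derivative w.r.t. y
```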
4 changes: 2 additions & 2 deletions src/finite_difference.jl
```diff
@@ -139,7 +139,7 @@ function finite_difference{T <: Number}(f::Function,
                                         x::Vector{T},
                                         dtype::Symbol = :central)
     # Allocate memory for gradient
-    g = Array(Float64, length(x))
+    g = Vector{Float64}(length(x))
 
     # Mutate allocated gradient
     finite_difference!(f, float(x), g, dtype)
```
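`finite_difference` allocates the gradient buffer and then delegates to the in-place `finite_difference!`. A minimal sketch of a central-difference gradient in the same allocate-then-mutate style (`fd_gradient` is a hypothetical name, not the package's exact code):

```julia
function fd_gradient(f::Function, x::Vector{Float64})
    g = Vector{Float64}(length(x))   # Vector{Float64}(undef, length(x)) on Julia >= 0.7
    h = cbrt(eps(Float64))           # a common step size for central differences
    for i in 1:length(x)
        xp = copy(x); xp[i] += h     # forward-perturbed point
        xm = copy(x); xm[i] -= h     # backward-perturbed point
        g[i] = (f(xp) - f(xm)) / (2h)
    end
    return g
end

fd_gradient(x -> x[1]^2 + 3x[2], [1.0, 2.0])   # ≈ [2.0, 3.0]
```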
```diff
@@ -270,7 +270,7 @@ function finite_difference_hessian{T <: Number}(f::Function,
     n = length(x)
 
     # Allocate an empty Hessian
-    H = Array(Float64, n, n)
+    H = Matrix{Float64}(n, n)
 
     # Mutate the allocated Hessian
     finite_difference_hessian!(f, x, H)
```
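The Hessian path follows the same pattern: allocate the `n × n` matrix, then fill it in place. A sketch of a symmetric central-difference Hessian under the same caveats (`fd_hessian` is hypothetical, and real implementations typically exploit symmetry rather than filling all `n^2` entries independently):

```julia
function fd_hessian(f::Function, x::Vector{Float64})
    n = length(x)
    H = Matrix{Float64}(n, n)        # Matrix{Float64}(undef, n, n) on Julia >= 0.7
    h = eps(Float64)^(1/4)           # larger step: second differences amplify rounding error
    for i in 1:n, j in 1:n
        xpp = copy(x); xpp[i] += h; xpp[j] += h
        xpm = copy(x); xpm[i] += h; xpm[j] -= h
        xmp = copy(x); xmp[i] -= h; xmp[j] += h
        xmm = copy(x); xmm[i] -= h; xmm[j] -= h
        # d2f/dxi dxj ≈ (f(++) - f(+-) - f(-+) + f(--)) / (4h^2)
        H[i, j] = (f(xpp) - f(xpm) - f(xmp) + f(xmm)) / (4 * h^2)
    end
    return H
end
```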