Skip to content

Commit

Permalink
Generalize call to Base.LinAlg.chksquare for v"0.5"
Browse files Browse the repository at this point in the history
  • Loading branch information
dmbates committed Feb 11, 2016
1 parent c0b5c41 commit d334e68
Show file tree
Hide file tree
Showing 8 changed files with 24 additions and 21 deletions.
9 changes: 6 additions & 3 deletions src/MixedModels.jl
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
#VERSION >= v"0.4.0-dev+6521" && __precompile__()
# __precompile__()

module MixedModels

using DataArrays, DataFrames, Distributions, NLopt, Showoff, StatsBase
using DataArrays, DataFrames, Distributions, NLopt, Showoff, StatsBase, GLM

export ReMat, ScalarReMat,VectorReMat

Expand All @@ -14,6 +14,7 @@ export LinearMixedModel,
BIC, # Schwarz's Bayesian Information Criterion
bootstrap, # Create bootstrap replications of a model
fixef, # extract the fixed-effects parameter estimates
glmm, # create a GeneralizedLinearMixedModel from formula, data, distribution, link
lmm, # create a LinearMixedModel from a formula/data specification
lowerbd, # lower bounds on the covariance parameters
npar, # total number of parameters in the model
Expand All @@ -27,6 +28,8 @@ export LinearMixedModel,
simulate!, # simulate a new response and refit the model
varest # estimate of the residual variance

chksqr = VERSION < v"0.5-" ? Base.LinAlg.chksquare : Base.LinAlg.checksquare

abstract MixedModel <: RegressionModel # model with fixed and random effects

import Base: ==
Expand All @@ -42,7 +45,7 @@ include("inject.jl")
include("pls.jl")
include("logdet.jl")
include("bootstrap.jl")
include("GLMM/glmtools.jl")
#include("GLMM/glmtools.jl")
include("GLMM/PIRLS.jl")

end # module
2 changes: 1 addition & 1 deletion src/bootstrap.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ Regenerate the last column of `m.A` from `m.trms`
This should be called after updating parts of `m.trms[end]`, typically the response.
"""
function regenerateAend!(m::LinearMixedModel)
n = Base.LinAlg.chksquare(m.A)
n = chksqr(m.A)
trmn = m.trms[n]
for i in 1:n
Ac_mul_B!(m.A[i,n],m.trms[i],trmn)
Expand Down
4 changes: 2 additions & 2 deletions src/cfactor.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ Uses `inject!` (as opposed to `copy!`), `downdate!` (as opposed to `syrk!`
or `gemm!`) and recursive calls to `cfactor!`,
"""
function cfactor!(A::AbstractMatrix)
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
for k = 1:n
Akk = A[k,k]
for i in 1:(k - 1)
Expand Down Expand Up @@ -135,7 +135,7 @@ end

function downdate!{T}(C::DenseMatrix{T},A::SparseMatrixCSC{T})
m,n = size(A)
if n ≠ Base.LinAlg.chksquare(C)
if n ≠ chksqr(C)
throw(DimensionMismatch("C is not square or size(C,2) ≠ size(A,2)"))
end
# FIXME: avoid allocation by caching a transposed matrix and just fill in the new values
Expand Down
2 changes: 1 addition & 1 deletion src/inflate.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ end
inflate!(D::Diagonal{Float64}) = (d = D.diag; for i in eachindex(d) d[i] += 1 end; D)

function inflate!{T<:AbstractFloat}(A::StridedMatrix{T})
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
for i in 1:n
@inbounds A[i,i] += 1
end
Expand Down
2 changes: 1 addition & 1 deletion src/inject.jl
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ end

function inject!{T<:Real}(d::StridedMatrix{T}, s::Diagonal{T})
sd = s.diag
if length(sd) ≠ Base.LinAlg.chksquare(d) # why does d have to be square?
if length(sd) ≠ chksqr(d) # why does d have to be square?
throw(DimensionMismatch("size(d,2) ≠ size(s,2)"))
end
fill!(d,zero(T))
Expand Down
2 changes: 1 addition & 1 deletion src/linalg.jl
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ function Base.LinAlg.Ac_ldiv_B!{T}(A::UpperTriangular{T,HBlkDiag{T}},B::DenseMat
m,n = size(B)
aa = A.data.arr
r,s,k = size(aa)
if m ≠ Base.LinAlg.chksquare(A)
if m ≠ chksqr(A)
throw(DimensionMismatch("size(A,2) ≠ size(B,1)"))
end
scr = Array(T,(r,n))
Expand Down
2 changes: 1 addition & 1 deletion src/logdet.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ end

function LD{T}(d::DenseMatrix{T})
r = log(one(T))
n = Base.LinAlg.chksquare(d)
n = chksqr(d)
for j in 1:n
r += log(d[j,j])
end
Expand Down
22 changes: 11 additions & 11 deletions src/paramlowertriangular.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
nlower(n::Integer) = (n*(n+1))>>1
nlower{T}(A::LowerTriangular{T,Matrix{T}}) = nlower(Base.LinAlg.chksquare(A))
nlower{T}(A::LowerTriangular{T,Matrix{T}}) = nlower(chksqr(A))

"""
return the lower triangle as a vector (column-major ordering)
Expand All @@ -8,7 +8,7 @@ function Base.getindex{T}(A::LowerTriangular{T,Matrix{T}},s::Symbol)
if s ≠ :θ
throw(KeyError(s))
end
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
res = Array(T,nlower(n))
k = 0
for j = 1:n, i in j:n
Expand All @@ -28,7 +28,7 @@ function Base.setindex!{T}(
if s ≠ :θ
throw(KeyError(s))
end
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
if length(v) ≠ nlower(n)
throw(DimensionMismatch("length(v) ≠ nlower(A)"))
end
Expand All @@ -43,7 +43,7 @@ end
lower bounds on the parameters (elements in the lower triangle)
"""
function lowerbd{T}(A::LowerTriangular{T,Matrix{T}})
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
res = fill(convert(T,-Inf),nlower(n))
k = -n
for j in n+1:-1:2
Expand Down Expand Up @@ -71,7 +71,7 @@ scale B using the implicit expansion of A to a homogeneous block diagonal
function tscale!(A::LowerTriangular,B::HBlkDiag)
Ba = B.arr
r,s,k = size(Ba)
n = Base.LinAlg.chksquare(A)
n = chksqr(A)
if n ≠ r
throw(DimensionMismatch("size(A,2) ≠ blocksize of B"))
end
Expand All @@ -80,7 +80,7 @@ function tscale!(A::LowerTriangular,B::HBlkDiag)
end

function tscale!{T}(A::LowerTriangular{T},B::Diagonal{T})
if Base.LinAlg.chksquare(A) ≠ 1
if chksqr(A) ≠ 1
throw(DimensionMismatch("A must be a 1×1 LowerTriangular"))
end
scale!(A.data[1],B.diag)
Expand All @@ -101,7 +101,7 @@ function LT(A::VectorReMat)
end

function tscale!{T}(A::LowerTriangular{T},B::DenseVecOrMat{T})
if (l = Base.LinAlg.chksquare(A)) == 1
if (l = chksqr(A)) == 1
return scale!(A.data[1],B)
end
m,n = size(B,1),size(B,2) # this sets n = 1 when B is a vector
Expand All @@ -114,23 +114,23 @@ function tscale!{T}(A::LowerTriangular{T},B::DenseVecOrMat{T})
end

function tscale!{T}(A::LowerTriangular{T},B::SparseMatrixCSC{T})
if (l = Base.LinAlg.chksquare(A)) ≠ 1
if (l = chksqr(A)) ≠ 1
error("Code not yet written")
end
scale!(A.data[1],B.nzval)
B
end

function tscale!{T}(A::SparseMatrixCSC{T},B::LowerTriangular)
if (l = Base.LinAlg.chksquare(B)) != 1
if (l = chksqr(B)) != 1
error("Code not yet written")
end
scale!(A.nzval,B.data[1])
A
end

function tscale!{T}(A::Diagonal{T},B::LowerTriangular{T})
if (l = Base.LinAlg.chksquare(B)) ≠ 1
if (l = chksqr(B)) ≠ 1
throw(DimensionMismatch(
"in tscale!(A::Diagonal,B::LowerTriangular) B must be 1×1"))
end
Expand All @@ -148,7 +148,7 @@ function tscale!{T}(A::HBlkDiag{T},B::LowerTriangular{T})
end

function tscale!{T}(A::StridedMatrix{T},B::LowerTriangular{T})
l = Base.LinAlg.chksquare(B)
l = chksqr(B)
l == 1 && return scale!(A,B.data[1])
m,n = size(A)
q,r = divrem(n,l)
Expand Down

0 comments on commit d334e68

Please sign in to comment.