From 62ed83ca01d2dc56d79e5d38a45d5cf8908aa170 Mon Sep 17 00:00:00 2001 From: Douglas Bates Date: Mon, 2 Apr 2018 16:26:47 -0500 Subject: [PATCH] deprecate lmm and glmm, update docs --- benchmark/benchmarks.jl | 2 +- docs/jmd/MultipleTerms.jmd | 6 +- docs/jmd/SimpleLMM.jmd | 64 +- docs/jmd/SingularCovariance.jmd | 6 +- docs/jmd/SubjectItem.jmd | 2 +- docs/jmd/bootstrap.jmd | 59 +- docs/jmd/constructors.jmd | 32 +- docs/jmd/optimization.jmd | 28 +- docs/src/MultipleTerms.md | 49 +- docs/src/SimpleLMM.md | 206 ++- docs/src/SingularCovariance.md | 87 +- docs/src/SubjectItem.md | 104 +- docs/src/assets/MultipleTerms_10_1.svg | 40 +- docs/src/assets/MultipleTerms_16_1.svg | 8 +- docs/src/assets/MultipleTerms_26_1.svg | 28 +- docs/src/assets/MultipleTerms_27_1.svg | 28 +- docs/src/assets/MultipleTerms_8_1.svg | 28 +- docs/src/assets/MultipleTerms_9_1.svg | 28 +- docs/src/assets/SimpleLMM_4_1.svg | 268 ++-- docs/src/assets/SimpleLMM_53_1.svg | 78 +- docs/src/assets/SimpleLMM_54_1.svg | 30 +- docs/src/assets/SimpleLMM_55_1.svg | 30 +- docs/src/assets/SimpleLMM_56_1.svg | 30 +- docs/src/assets/SimpleLMM_57_1.svg | 30 +- docs/src/assets/SimpleLMM_71_1.svg | 46 +- docs/src/assets/SimpleLMM_72_1.svg | 46 +- docs/src/assets/SimpleLMM_7_1.svg | 266 ++-- docs/src/assets/SingularCovariance_14_1.svg | 8 +- docs/src/assets/SingularCovariance_15_1.svg | 8 +- docs/src/assets/SingularCovariance_16_1.svg | 82 +- docs/src/assets/SingularCovariance_17_1.svg | 40 +- docs/src/assets/SingularCovariance_19_1.svg | 42 +- docs/src/assets/SingularCovariance_21_1.svg | 40 +- docs/src/assets/SingularCovariance_32_1.svg | 132 +- docs/src/assets/SingularCovariance_35_1.svg | 102 +- docs/src/assets/SingularCovariance_40_1.svg | 342 ++--- docs/src/assets/SingularCovariance_46_1.svg | 1056 ++++++++------ docs/src/assets/bootstrap_6_1.svg | 64 + docs/src/assets/bootstrap_7_1.svg | 60 + docs/src/assets/bootstrap_8_1.svg | 219 ++- docs/src/bootstrap.md | 103 +- docs/src/constructors.md | 130 +- docs/src/optimization.md | 1393 ++++++++++++------- src/MixedModels.jl | 14 +- src/PIRLS.jl | 25 +- src/deprecates.jl | 3 + src/mixedmodel.jl | 2 +- src/modelterms.jl | 2 +- src/pls.jl | 38 +- test/pirls.jl | 23 +- test/pls.jl | 22 +- 51 files changed, 3286 insertions(+), 2293 deletions(-) create mode 100644 docs/src/assets/bootstrap_6_1.svg create mode 100644 docs/src/assets/bootstrap_7_1.svg create mode 100644 src/deprecates.jl diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl index ad6b39c94..93ce8c46a 100644 --- a/benchmark/benchmarks.jl +++ b/benchmark/benchmarks.jl @@ -72,7 +72,7 @@ const mods = Dict{Symbol,Vector{Expr}}( :star => [] # not sure it is worthwhile working with these data ); -fitbobyqa(rhs::Expr, dsname::Symbol) = fit!(lmm(Formula(:Y, rhs), dat[dsname])) +fitbobyqa(rhs::Expr, dsname::Symbol) = fit(LinearMixedModel, Formula(:Y, rhs), dat[dsname]) compactstr(ds,rhs) = replace(string(ds, ':', rhs), ' ', "") SUITE["simplescalar"] = BenchmarkGroup(["single", "simple", "scalar"]) diff --git a/docs/jmd/MultipleTerms.jmd b/docs/jmd/MultipleTerms.jmd index 6c46c1ad9..cb1047ba6 100644 --- a/docs/jmd/MultipleTerms.jmd +++ b/docs/jmd/MultipleTerms.jmd @@ -7,7 +7,7 @@ In this chapter we consider models with multiple simple, scalar random-effects t ```{julia;term=true} using DataFrames, Distributions, FreqTables, Gadfly, MixedModels, RData -using Gadfly.Geom: density, histogram, point +using Gadfly.Geom: density, histogram, line, point using Gadfly.Guide: xlabel, ylabel const dat = convert(Dict{Symbol,DataFrame}, 
load(Pkg.dir("MixedModels", "test", "dat.rda"))); const ppt250 = inv(500) : inv(250) : 1.; @@ -81,7 +81,7 @@ Even when we apply each of the six samples to each of the 24 plates, something c A model incorporating random effects for both the plate and the sample is straightforward to specify — we include simple, scalar random effects terms for both these factors. ```{julia;term=true} -penm = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin])) +penm = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin]) ``` This model display indicates that the sample-to-sample variability has the greatest contribution, then plate-to-plate variability and finally the “residual” variability that cannot be attributed to either the sample or the plate. These conclusions are consistent with what we see in the data plot (Fig. [fig:Penicillindot]). @@ -406,7 +406,7 @@ it is sufficiently diffuse to warrant treating it as if it were a continuous res At this point we will fit models that have random effects for student, instructor, and department (or the combination) to these data. In the next chapter we will fit models incorporating fixed-effects for instructor and department to these data. ```{julia;term=true} -@time instm = fit!(lmm(@formula(Y ~ 1 + A + (1|G) + (1|H) + (1|I)), dat[:InstEval])) +@time instm = fit(LinearMixedModel, @formula(Y ~ 1 + A + (1|G) + (1|H) + (1|I)), dat[:InstEval]) ``` (Fitting this complex model to a moderately large data set takes a few seconds on a modest desktop computer. Although this is more time than required for earlier model fits, it is a remarkably short time for fitting a model of this size and complexity. In some ways it is remarkable that such a model can be fit at all on such a computer.) diff --git a/docs/jmd/SimpleLMM.jmd b/docs/jmd/SimpleLMM.jmd index 7c14c1077..8d16cf4a0 100644 --- a/docs/jmd/SimpleLMM.jmd +++ b/docs/jmd/SimpleLMM.jmd @@ -69,9 +69,9 @@ The data are described in Davies (), the fourth edition of the book mentioned ab First attach the packages to be used ```{julia;term=true} -using DataFrames, Distributions, Gadfly, GLM, MixedModels, RData +using DataFrames, Distributions, Gadfly, GLM, MixedModels, RData, RCall ``` -and allow for unqualified names for some graphics functions. +and allow for unqualified names for some graphics functions ```{julia;term=true} using Gadfly.Geom: point, line, histogram, density, vline using Gadfly.Guide: xlabel, ylabel, yticks @@ -84,11 +84,11 @@ Access the `Dyestuff` data ```{julia;term=true} const dat = convert(Dict{Symbol,DataFrame}, load(Pkg.dir("MixedModels", "test", "dat.rda"))); dyestuff = dat[:Dyestuff]; -dump(dyestuff) +describe(dyestuff) ``` and plot it ```{julia;echo=false;fig_cap="Yield versus Batch for the Dyestuff data"; fig_width=8;} -plot(dyestuff, x = "Y", y = "G", point, xlabel("Yield of dyestuff (g)"), ylabel("Batch")) +plot(dyestuff, x = :Y, y = :G, point, xlabel("Yield of dyestuff (g)"), ylabel("Batch")) ``` In the dotplot we can see that there is considerable variability in yield, even for preparations from the same batch, but there is also noticeable batch-to-batch variability. @@ -99,11 +99,9 @@ In a plot, however, the order of the levels influences the perception of the pat Rather than providing an arbitrary pattern it is best to order the levels according to some criterion for the plot. In this case a good choice is to order the batches by increasing mean yield, which can be easily done in R. -(Note: at present this plot fails because of the ongoing DataFrames conversion.) 
- ```{julia;term=true} -#dyestuff = rcopy("within(Dyestuff, Batch <- reorder(Batch, Yield, mean))"); -#plot(dyestuff, x="Y", y="G", point, xlabel("Yield of dyestuff (g)")) +dyestuffR = rcopy(R"within(lme4::Dyestuff, Batch <- reorder(Batch, Yield, mean))"); +plot(dyestuffR, x = :Yield, y = :Batch, point, xlabel("Yield of dyestuff (g)"), ylabel("Batch")) ``` In Sect. [sec:DyestuffLMM] we will use mixed models to quantify the variability in yield between batches. @@ -122,11 +120,11 @@ The data are simulated data presented in Box and Tiao (1973), where the authors The structure and summary are intentionally similar to those of the `Dyestuff` data. ```{julia;term=true} dyestuff2 = dat[:Dyestuff2]; -dump(dyestuff2) +describe(dyestuff2) ``` As can be seen in a data plot ```{julia;echo=false;fig_width=8} -plot(dyestuff2, x = "Y", y = "G", point, xlabel("Simulated response"), ylabel("")) +plot(dyestuff2, x = :Y, y = :G, point, xlabel("Simulated response"), ylabel("")) ``` the batch-to-batch variability in these data is small compared to the within-batch variability. In some approaches to mixed models it can be difficult to fit models to such data. @@ -144,7 +142,7 @@ The structure of the formula will be explained after showing the example. A model allowing for an overall level of the `Yield` and for an additive random effect for each level of `Batch` can be fit as ```{julia;term=true} -mm1 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff)) +mm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1 | G)), dyestuff) ``` As shown in the summary of the model fit, the default estimation criterion is maximum likelihood. @@ -187,7 +185,7 @@ The standard error of the intercept estimate is 17.69 g. Fitting a similar model to the `dyestuff2` data produces an estimate $\widehat{\sigma_1^2}=0$. ```{julia;term=true} -mm2 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff2)) +mm2 = fit(LinearMixedModel, @formula(Y ~ 1 + (1 | G)), dyestuff2) ``` An estimate of `0` for $\sigma_1$ does not mean that there is no variation between the groups. @@ -335,7 +333,7 @@ For a linear mixed model, where all the conditional and unconditional distributi The optional second argument, `verbose`, in a call to `fit!` of a `LinearMixedModel` object produces output showing the progress of the iterative optimization of $\tilde{d}(\bf\theta|\bf y)$. ```{julia;term=true} -mm1 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff), true); +mm1 = fit!(LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dyestuff), true); ``` The algorithm converges after 18 function evaluations to a profiled deviance of 327.32706 at $\theta=0.752581$. In this model the parameter $\theta$ is of length 1, the single element being the ratio $\sigma_1/\sigma$. @@ -349,7 +347,7 @@ mm1.optsum The full list of fields in a `LinearMixedModel` object is ```{julia;term=true} -fieldnames(LinearMixedModel) +showcompact(fieldnames(LinearMixedModel)) ``` The `formula` field is a copy of the model formula @@ -520,12 +518,12 @@ First set the random number seed for reproducibility. 
```{julia;term=true} srand(1234321); -mm1bstp = bootstrap(10000, mm1); +mm1bstp = bootstrap(100000, mm1); size(mm1bstp) ``` ```{julia;term=true} -show(names(mm1bstp)) +showcompact(names(mm1bstp)) ``` #### Histograms, kernel density plots and quantile-quantile plots @@ -539,15 +537,15 @@ Finally, the extent to which the distribution of a sample can be approximated by The [`Gadfly`](https://github.com/GiovineItalia/Gadfly.jl) package for Julia uses a "grammar of graphics" specification, similar to the [`ggplot2`](http://ggplot2.org/) package for R. A histogram or a kernel density plot are describes as *geometries* and specified by `Geom.histogram` and `Geom.density`, respectively. ```{julia;term=true} -plot(mm1bstp, x = :β₁, Geom.histogram) +plot(mm1bstp, x = :β₁, histogram) ``` ```{julia;term=true} -plot(mm1bstp, x = :σ, Geom.histogram) +plot(mm1bstp, x = :σ, histogram) ``` ```{julia;term=true} -plot(mm1bstp, x = :σ₁, Geom.histogram) +plot(mm1bstp, x = :σ₁, histogram) ``` The last two histograms show that, even if the models are defined in terms of variances, the variance is usually not a good scale on which to assess the variability of the parameter estimates. The standard deviation or, in some cases, the logarithm of the standard deviation is a more suitable scale. @@ -561,7 +559,7 @@ length(mm1bstp[:θ₁]) - countnz(mm1bstp[:θ₁]) That is, nearly 1/10 of the `theta1` values are zeros. Because such a spike or pulse will be spread out or diffused in a kernel density plot, ```{julia;term=true} -plot(mm1bstp, x = :θ₁, Geom.density) +plot(mm1bstp, density, x = :θ₁) ``` such a plot is not suitable for a sample of a bounded parameter that includes values on the boundary. @@ -569,11 +567,11 @@ such a plot is not suitable for a sample of a bounded parameter that includes va The density of the estimates of the other two parameters, $\beta_1$ and $\sigma$, are depicted well in kernel density plots. ```{julia;term=true} -plot(mm1bstp, x = :β₁, Geom.density) +plot(mm1bstp, density, x = :β₁) ``` ```{julia;term=true} -plot(mm1bstp, x = :σ, Geom.density) +plot(mm1bstp, density, x = :σ) ``` The standard approach of summarizing a sample by its mean and standard deviation, or of constructing a confidence interval using the sample mean, the standard error of the mean and quantiles of a *t* or normal distribution, are based on the assumption that the sample is approximately normal (also called Gaussian) in shape. A *normal probability plot*, which plots sample quantiles versus quantiles of the standard normal distribution, $\mathcal{N}(0,1)$, can be used to assess the validity of this assumption. If the points fall approximately along a straight line, the assumption of normality should be valid. Systematic departures from a straight line are cause for concern. @@ -597,34 +595,34 @@ The kernel density estimate of $\sigma$ is more symmetric ```{julia;echo=false;fig_width=8} zquantiles = quantile(Normal(), ppt250); -plot(x = zquantiles, y = quantile(mm1bstp[:β₁], ppt250), Geom.line, - Guide.xlabel("Standard Normal Quantiles"), Guide.ylabel("β₁")) +plot(x = zquantiles, y = quantile(mm1bstp[:β₁], ppt250), line) +# Guide.xlabel("Standard Normal Quantiles"), Guide.ylabel("β₁")) ``` and the normal probability plot of $\sigma$ is also reasonably straight. 
```{julia;echo=false;fig_width=8} -plot(x = zquantiles, y = quantile(mm1bstp[:σ], ppt250), Geom.line, - Guide.xlabel("Standard Normal quantiles"), Guide.ylabel("σ")) +plot(x = zquantiles, y = quantile(mm1bstp[:σ], ppt250), line, + xlabel("Standard Normal quantiles"), ylabel("σ")) ``` The normal probability plot of $\sigma_1$ has a flat section at $\sigma_1 = 0$. ```{julia;echo=false;fig_width=8} -plot(x = zquantiles, y = quantile(mm1bstp[:σ₁], ppt250), Geom.line, - Guide.xlabel("Standard Normal Quantiles"), Guide.ylabel("σ₁")) +plot(x = zquantiles, y = quantile(mm1bstp[:σ₁], ppt250), line, + xlabel("Standard Normal Quantiles"), ylabel("σ₁")) ``` In terms of the variances, $\sigma^2$ and $\sigma_1^2$, the normal probability plots are ```{julia;echo=false} -plot(x = zquantiles, y = quantile(abs2.(mm1bstp[:σ]), ppt250), Geom.line, - Guide.xlabel("Standard Normal quantiles"), Guide.ylabel("σ²")) +plot(x = zquantiles, y = quantile(abs2.(mm1bstp[:σ]), ppt250), line, + xlabel("Standard Normal quantiles"), ylabel("σ²")) ``` ```{julia;echo=false} -plot(x = zquantiles, y = quantile(abs2.(mm1bstp[:σ₁]), ppt250), Geom.line, - Guide.xlabel("Standard Normal Quantiles"), Guide.ylabel("σ₁²")) +plot(x = zquantiles, y = quantile(abs2.(mm1bstp[:σ₁]), ppt250), line, + xlabel("Standard Normal Quantiles"), ylabel("σ₁²")) ``` ### Confidence intervals based on bootstrap samples @@ -741,7 +739,7 @@ The empirical cumulative distribution function (ecdf) of a sample maps the range plot(layer(x = quantile(mm1bstp[:σ₁], ppt250), y = ppt250, line), layer(xintercept = quantile(mm1bstp[:σ₁], [0.1, 0.9]), vline(color = colorant"orange")), layer(xintercept = hpdinterval(mm1bstp[:σ₁], 0.8), vline(color=colorant"red")), - ylabel(""), Guide.xlabel("σ₁"), yticks(ticks=[0.0, 0.1, 0.9, 1.0]) + ylabel(""), xlabel("σ₁"), yticks(ticks=[0.0, 0.1, 0.9, 1.0]) ) ``` diff --git a/docs/jmd/SingularCovariance.jmd b/docs/jmd/SingularCovariance.jmd index bb964f818..cdf95eebf 100644 --- a/docs/jmd/SingularCovariance.jmd +++ b/docs/jmd/SingularCovariance.jmd @@ -25,7 +25,7 @@ As is customary (though not required) in Julia, a function whose name ends in `! An optional second argument of `true` in the call to `fit!` produces verbose output from the optimization. ```{julia;term=true} -sleepm = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true) +sleepm = fit!(LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true) ``` The variables in the optimization are the elements of a lower triangular matrix, $\Lambda$, which is the relative covariance factor of the random effects. @@ -274,7 +274,7 @@ show(names(oxboys)) ``` ```{julia;term=true} -oxboysm = fit!(lmm(@formula(height ~ 1 + age + (1+age | Subject)), oxboys)) +oxboysm = fit(LinearMixedModel, @formula(height ~ 1 + age + (1+age | Subject)), oxboys) ``` ```{julia;term=true} @@ -383,7 +383,7 @@ When the time origin is the beginning of the treatment there is not generally a ```{julia;term=true} early = rcopy(R"subset(Early, select = c(cog, tos, id, trt, trttos))"); -earlym = fit!(lmm(@formula(cog ~ 1 + tos + trttos + (1 + tos | id)), early)) +earlym = fit(LinearMixedModel, @formula(cog ~ 1 + tos + trttos + (1 + tos | id)), early) ``` The model converges to a singular covariance matrix for the random effects. 
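Because several of the fits in this document converge to such a boundary, a small check for singularity is convenient. The sketch below is a helper written under assumptions, not part of the package API: it uses the `getθ` extractor shown above and assumes the optimization summary stores its lower bounds in an `optsum.lowerbd` field, so that a θ component lying (numerically) on a zero lower bound flags a singular covariance estimate.

```julia
# Return true when any element of θ is at its lower bound of zero (within tol).
# Off-diagonal elements of Λ have a lower bound of -Inf and never trigger the test.
issingularfit(m, tol=1e-5) = any(abs.(getθ(m) .- m.optsum.lowerbd) .<= tol)
```

Applied to the fits above, such a check should flag `earlym` but not `sleepm`, whose smallest θ component (≈ 0.018) is small yet clearly away from the boundary.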
diff --git a/docs/jmd/SubjectItem.jmd b/docs/jmd/SubjectItem.jmd index 21df79e05..86c7fd420 100644 --- a/docs/jmd/SubjectItem.jmd +++ b/docs/jmd/SubjectItem.jmd @@ -5,7 +5,7 @@ const dat = convert(Dict{Symbol,DataFrame}, load(Pkg.dir("MixedModels", "test", ``` ```{julia;term=true} -mm1 = fit!(lmm(@formula(Y ~ 1+S+T+U+V+W+X+Z+(1+S+T+U+V+W+X+Z|G)+(1+S+T+U+V+W+X+Z|H)), dat[:kb07])) +mm1 = fit(LinearMixedModel, @formula(Y ~ 1+S+T+U+V+W+X+Z+(1+S+T+U+V+W+X+Z|G)+(1+S+T+U+V+W+X+Z|H)), dat[:kb07]) ``` ```{julia;term=true} diff --git a/docs/jmd/bootstrap.jmd b/docs/jmd/bootstrap.jmd index 7a4f40daa..4ce2b01d9 100644 --- a/docs/jmd/bootstrap.jmd +++ b/docs/jmd/bootstrap.jmd @@ -22,63 +22,36 @@ parameter, `θ`, that defines the variance-covariance matrices of the random eff For example, a simple linear mixed-effects model for the `Dyestuff` data in the [`lme4`](http://github.com/lme4/lme4) package for [`R`](https://www.r-project.org) is fit by ```{julia;term=true} -using DataFrames, Gadfly, MixedModels, RData +using DataFrames, MixedModels, RData, Gadfly ``` ```{julia;echo=false;results="hidden"} const dat = convert(Dict{Symbol,DataFrame}, load(Pkg.dir("MixedModels", "test", "dat.rda"))); ``` ```{julia;term=true} ds = names!(dat[:Dyestuff], [:Batch, :Yield]) -m1 = fit!(lmm(@formula(Yield ~ 1 + (1 | Batch)), ds)) +m1 = fit(LinearMixedModel, @formula(Yield ~ 1 + (1 | Batch)), ds) ``` - -## Using the `bootstrap!` function - -This quick explanation is provided for those who only wish to use the `bootstrap!` method and do not need -detailed explanations of how it works. -The three arguments to `bootstrap!` are the matrix that will be overwritten with the results, the model to bootstrap, -and a function that overwrites a vector with the results of interest from the model. - -Suppose the objective is to obtain 100,000 parametric bootstrap samples of the estimates of the "variance -components", `σ²` and `σ₁²`, in this model. In many implementations of mixed-effects models the -estimate of `σ₁²`, the variance of the scalar random effects, is reported along with a -standard error, as if the estimator could be assumed to have a Gaussian distribution. -Is this a reasonable assumption? - -A suitable function to save the results is -```{julia;term=true} -function saveresults!(v, m) - v[1] = varest(m) - v[2] = abs2(getθ(m)[1]) * v[1] -end -``` -The `varest` extractor function returns the estimate of `σ²`. As seen above, the estimate of the -`σ₁` is the product of `Θ` and the estimate of `σ`. The expression `abs2(getΘ(m)[1])` evaluates to -`Θ²`. The `[1]` is necessary because the value returned by `getθ` is a vector and a scalar is needed -here. - -As with any simulation-based method, it is advisable to set the random number seed before calling -`bootstrap!` for reproducibility. +Now bootstrap the model parameters ```{julia;term=true;} -srand(1234321); -``` -```{julia;term=true;} -results = bootstrap!(zeros(2, 100000), m1, saveresults!); +results = bootstrap(100_000, m1); +showcompact(names(results)) ``` The results for each bootstrap replication are stored in the columns of the matrix passed in as the first -argument. A density plot of the first row using the [`Gadfly`](https://github.com/dcjones/Gadfly.jl) package -is created as +argument. 
A density plot of the bootstrapped values of `σ` is created as ```{julia;eval=false;term=true} -plot(x = view(results, 1, :), Geom.density(), Guide.xlabel("Parametric bootstrap estimates of σ²")) +plot(results, x = :σ, Geom.density, Guide.xlabel("Parametric bootstrap estimates of σ")) +``` +```{julia;echo=false;fig_cap="Density of parametric bootstrap estimates of σ from model m1"; fig_width=8;} +plot(results, x = :σ, Geom.density, Guide.xlabel("Parametric bootstrap estimates of σ")) ``` -```{julia;echo=false;fig_cap="Density of parametric bootstrap estimates of σ² from model m1"; fig_width=8;} -plot(x = view(results, 1, :), Geom.density(), Guide.xlabel("Parametric bootstrap estimates of σ²")) +```{julia;echo=false;fig_cap="Density of parametric bootstrap estimates of σ₁ from model m1"; fig_width=8;} +plot(results, x = :σ₁, Geom.density, Guide.xlabel("Parametric bootstrap estimates of σ₁")) ``` -```{julia;echo=false;fig_cap="Density of parametric bootstrap estimates of σ₁² from model m1"; fig_width=8;} -plot(x = view(results, 2, :), Geom.density(), Guide.xlabel("Parametric bootstrap estimates of σ₁²")) +```{julia;echo=false;fig_cap="Histogram of parametric bootstrap estimates of σ₁ from model m1"; fig_width=8;} +plot(results, x = :σ₁, Geom.histogram, Guide.xlabel("Parametric bootstrap estimates of σ₁")) ``` -The distribution of the bootstrap samples of `σ²` is a bit skewed but not terribly so. However, the -distribution of the bootstrap samples of the estimate of `σ₁²` is highly skewed and has a spike at +The distribution of the bootstrap samples of `σ` is a bit skewed but not terribly so. However, the +distribution of the bootstrap samples of the estimate of `σ₁` is highly skewed and has a spike at zero. diff --git a/docs/jmd/constructors.jmd b/docs/jmd/constructors.jmd index 64c868bef..e5497f728 100644 --- a/docs/jmd/constructors.jmd +++ b/docs/jmd/constructors.jmd @@ -1,9 +1,9 @@ # Model constructors -The `lmm` function creates a linear mixed-effects model representation from a `Formula` and an appropriate `data` type. -At present the data type must be a `DataFrame` but this is expected to change. +The `LinearMixedModel` type represents a linear mixed-effects model. +Typically it is constructed from a `Formula` and an appropriate `data` type, usually a `DataFrame`. ```@docs -lmm +LinearMixedModel ``` ## Examples of linear mixed-effects model fits @@ -28,14 +28,14 @@ Categorical covariates not suitable as grouping factors are named starting with The formula language in *Julia* is similar to that in *R* except that the formula must be enclosed in a call to the `@formula` macro. A basic model with simple, scalar random effects for the levels of `G` (the batch of an intermediate product, in this case) is declared and fit as ```{julia;term=true} -fm1 = fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])) +fm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff]) ``` (If you are new to Julia you may find that this first fit takes an unexpectedly long time, due to Just-In-Time (JIT) compilation of the code. The second and subsequent calls to such functions are much faster.) 
```{julia;term=true} -@time fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff2])) +@time fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff2]) ``` ### Simple, scalar random effects @@ -54,12 +54,12 @@ It corresponds to a shift in the intercept for each level of the grouping factor The *sleepstudy* data are observations of reaction time, `Y`, on several subjects, `G`, after 0 to 9 days of sleep deprivation, `U`. A model with random intercepts and random slopes for each subject, allowing for within-subject correlation of the slope and intercept, is fit as ```{julia;term=true} -fm2 = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy])) +fm2 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]) ``` A model with uncorrelated random effects for the intercept and slope by subject is fit as ```{julia;term=true} -fm3 = fit!(lmm(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])) +fm3 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy]) ``` Although technically there are two random-effects *terms* in the formula for *fm3* both have the same grouping factor @@ -70,33 +70,33 @@ and, internally, are amalgamated into a single vector-valued term. A model for the *Penicillin* data incorporates random effects for the plate, `G`, and for the sample, `H`. As every sample is used on every plate these two factors are *crossed*. ```{julia;term=true} -fm4 = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin])) +fm4 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin]) ``` In contrast the sample, `G`, grouping factor is *nested* within the batch, `H`, grouping factor in the *Pastes* data. That is, each level of `G` occurs in conjunction with only one level of `H`. ```{julia;term=true} -fm5 = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Pastes])) +fm5 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Pastes]) ``` In observational studies it is common to encounter *partially crossed* grouping factors. For example, the *InstEval* data are course evaluations by students, `G`, of instructors, `H`. Additional covariates include the academic department, `I`, in which the course was given and `A`, whether or not it was a service course. ```{julia;term=true} -fm6 = fit!(lmm(@formula(Y ~ 1 + A * I + (1|G) + (1|H)), dat[:InstEval])) +fm6 = fit(LinearMixedModel, @formula(Y ~ 1 + A * I + (1|G) + (1|H)), dat[:InstEval]) ``` ## Fitting generalized linear mixed models -To create a GLMM using +To create a GLMM representation ```@docs -glmm +GeneralizedLinearMixedModel ``` the distribution family for the response, and possibly the link function, must be specified. ```{julia;term=true} -gm1 = fit!(glmm(@formula(r2 ~ 1 + a + g + b + s + m + (1|id) + (1|item)), dat[:VerbAgg], - Bernoulli())) +gm1 = fit(GeneralizedLinearMixedModel, @formula(r2 ~ 1 + a + g + b + s + m + (1|id) + (1|item)), + dat[:VerbAgg], Bernoulli()) ``` The canonical link, which is `GLM.LogitLink` for the `Bernoulli` distribution, is used if no explicit link is specified. @@ -179,8 +179,8 @@ The standard errors are the square roots of the diagonal elements of the estimat stderr ``` ```{julia;term=true} -show(stderr(fm2)) -show(stderr(gm1)) +show(StatsBase.stderr(fm2)) +show(StatsBase.stderr(gm1)) ``` Finally, the `coeftable` generic produces a table of coefficient estimates, their standard errors, and their ratio. 
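A brief illustration of that generic, assuming it can be called unqualified (unlike `stderr`, `coeftable` does not clash with a `Base` name):

```julia
# Coefficient table for the sleepstudy model fm2: estimates, standard errors,
# and z ratios (estimate divided by standard error) in a single display.
coeftable(fm2)
```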
diff --git a/docs/jmd/optimization.jmd b/docs/jmd/optimization.jmd index 75684a8a1..c0b0b0d76 100644 --- a/docs/jmd/optimization.jmd +++ b/docs/jmd/optimization.jmd @@ -101,12 +101,12 @@ For a linear mixed model, where all the conditional and unconditional distributi In the types of `LinearMixedModel` available through the `MixedModels` package, groups of random effects and the corresponding columns of the model matrix, $\bf Z$, are associated with *random-effects terms* in the model formula. For the simple example -```{julia;echo=false} +```{julia;echo=false;results="hidden"} using DataFrames, RData, MixedModels -const dat=convert(Dict{Symbol,DataFrame},load(Pkg.dir("MixedModels", "test", "dat.rda"))) +const dat=convert(Dict{Symbol,DataFrame},load(Pkg.dir("MixedModels", "test", "dat.rda"))); ``` ```{julia;term=true} -fm1 = fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])) +fm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff]) ``` the only random effects term in the formula is `(1|G)`, a simple, scalar random-effects term. ```{julia;term=true} @@ -120,7 +120,7 @@ This `ScalarFactorReTerm` contributes a block of columns to the model matrix $\b ```{julia;term=true} getθ(t1) getΛ(t1) -full(t1) +convert(Array{Int}, full(t1)) # matrix is floating point but all integers ``` Because there is only one random-effects term in the model, the matrix $\bf Z$ is the indicators matrix shown as the result of `full(t1)`, but stored in a special sparse format. @@ -130,7 +130,7 @@ For a `ScalarFactorReTerm` this block is a multiple of the identity, in this cas For a vector-valued random-effects term, as in ```{julia;term=true} -fm2 = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy])) +fm2 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]) t21 = fm2.trms[1] ``` the random-effects term `(1+U|G)` generates a @@ -139,7 +139,7 @@ VectorFactorReTerm ``` The model matrix $\bf Z$ for this model is ```{julia;term=true} -full(t21) +convert(Array{Int}, full(t21)) ``` and $\Lambda_\theta$ is a $36\times36$ block diagonal matrix with $18$ diagonal blocks, all of the form ```{julia;term=true} @@ -152,7 +152,7 @@ getθ(t21) Random-effects terms in the model formula that have the same grouping factor are amagamated into a single `VectorFactorReTerm` object. ```{julia;term=true} -fm3 = fit!(lmm(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])) +fm3 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy]) t31 = fm3.trms[1] ``` For this model the matrix $\bf Z$ is the same as that of model `fm2` but the diagonal blocks of $\Lambda_\theta$ are themselves diagonal. @@ -163,7 +163,7 @@ getθ(t31) Random-effects terms with distinct grouping factors generate distinct elements of the `trms` member of the `LinearMixedModel` object. Multiple `AbstractFactorReTerm` (i.e. either a `ScalarFactorReTerm` or a `VectorFactorReTerm`) objects are sorted by decreasing numbers of random effects. ```{julia;term=true} -fm4 = fit!(lmm(@formula(Y ~ 1 + (1|H) + (1|G)), dat[:Penicillin])) +fm4 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|H) + (1|G)), dat[:Penicillin]) t41 = fm4.trms[1] t42 = fm4.trms[2] ``` @@ -173,8 +173,8 @@ Note that the first `ScalarFactorReTerm` in `fm4.trms` corresponds to grouping f An optional `Bool` argument of `true` in the call to `fit!` of a `LinearMixedModel` causes printing of the objective and the $\theta$ parameter at each evaluation during the optimization. 
```{julia;term=true} -fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff]), true); -fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true); +fit!(LinearMixedModel(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff]), true); +fit!(LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true); ``` A shorter summary of the optimization process is always available as an @@ -193,7 +193,7 @@ To modify the optimization process the input fields can be changed after constru Suppose, for example, that the user wishes to try a [Nelder-Mead](https://en.wikipedia.org/wiki/Nelder%E2%80%93Mead_method) optimization method instead of the default [`BOBYQA`](https://en.wikipedia.org/wiki/BOBYQA) (Bounded Optimization BY Quadratic Approximation) method. ```{julia;term=true} -fm2 = lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]); +fm2 = LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]); fm2.optsum.optimizer = :LN_NELDERMEAD; fit!(fm2) fm2.optsum @@ -241,7 +241,7 @@ Poisson The `glmm` function generates, but does not fit, a `GeneralizedLinearMixedModel` object. ```{julia;term=true} -mdl = glmm(@formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item)), +mdl = GeneralizedLinearMixedModel(@formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item)), dat[:VerbAgg], Bernoulli()); typeof(mdl) ``` @@ -307,7 +307,7 @@ mdl.LMM.optsum As one would hope, given the name of the option, this fit is comparatively fast. ```{julia;term=true} -@time(fit!(glmm(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)), +@time(fit!(GeneralizedLinearMixedModel(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)), dat[:VerbAgg], Bernoulli()), fast=true)) ``` @@ -315,7 +315,7 @@ The alternative algorithm is to use PIRLS to find the conditional mode of the ra Because it is slower to incorporate the $\beta$ parameters in the general nonlinear optimization, the fast fit is performed first and used to determine starting estimates for the more general optimization. ```{julia;term=true} -@time mdl1 = fit!(glmm(@formula(r2 ~ 1+a+g+b+s+(1|id)+(1|item)), +@time mdl1 = fit!(GeneralizedLinearMixedModel(@formula(r2 ~ 1+a+g+b+s+(1|id)+(1|item)), dat[:VerbAgg], Bernoulli()), verbose = true) ``` diff --git a/docs/src/MultipleTerms.md b/docs/src/MultipleTerms.md index ed04ff0d6..96821c040 100644 --- a/docs/src/MultipleTerms.md +++ b/docs/src/MultipleTerms.md @@ -8,7 +8,7 @@ In this chapter we consider models with multiple simple, scalar random-effects t ````julia julia> using DataFrames, Distributions, FreqTables, Gadfly, MixedModels, RData -julia> using Gadfly.Geom: density, histogram, point +julia> using Gadfly.Geom: density, histogram, line, point julia> using Gadfly.Guide: xlabel, ylabel @@ -116,7 +116,7 @@ Even when we apply each of the six samples to each of the 24 plates, something c A model incorporating random effects for both the plate and the sample is straightforward to specify — we include simple, scalar random effects terms for both these factors. ````julia -julia> penm = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin])) +julia> penm = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) + (1 | H) logLik -2 logLik AIC BIC @@ -125,7 +125,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. 
G (Intercept) 0.7149795 0.8455646 - H (Intercept) 3.1351920 1.7706474 + H (Intercept) 3.1351924 1.7706474 Residual 0.3024264 0.5499331 Number of obs: 144; levels of grouping factors: 24, 6 @@ -164,7 +164,7 @@ or as the `Final parameter vector` in the `opsum` field of `penm` ````julia julia> penm.optsum Initial parameter vector: [1.0, 1.0] -Initial objective value: 364.6267798165791 +Initial objective value: 364.6267798165433 Optimizer (from NLopt): LN_BOBYQA Lower bounds: [0.0, 0.0] @@ -177,7 +177,7 @@ maxfeval: -1 Function evaluations: 44 Final parameter vector: [1.53758, 3.21975] -Final objective value: 332.1883486722809 +Final objective value: 332.1883486722732 Return code: FTOL_REACHED @@ -195,7 +195,7 @@ A bootstrap simulation of the model ````julia julia> @time penmbstp = bootstrap(10000, penm); - 9.382835 seconds (19.42 M allocations: 867.233 MiB, 4.28% gc time) + 13.519157 seconds (17.92 M allocations: 816.905 MiB, 5.69% gc time) ```` @@ -218,7 +218,6 @@ Plot(...) ```` -![](./assets//MultipleTerms_11_1.svg) @@ -357,8 +356,8 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - G (Intercept) 8.4336166 2.90406897 - H (Intercept) 1.1991794 1.09507048 + G (Intercept) 8.4336167 2.90406898 + H (Intercept) 1.1991793 1.09507045 Residual 0.6780021 0.82340884 Number of obs: 60; levels of grouping factors: 30, 10 @@ -395,7 +394,7 @@ confirm this impression in that all the prediction intervals for the random effe julia> srand(4321234); julia> @time pstsbstp = bootstrap(10000, pstsm); - 7.142706 seconds (14.38 M allocations: 680.942 MiB, 2.69% gc time) + 9.520945 seconds (13.85 M allocations: 659.111 MiB, 2.39% gc time) ```` @@ -410,7 +409,6 @@ Plot(...) ```` -![](./assets//MultipleTerms_17_1.svg) ````julia julia> plot(x = pstsbstp[:σ₁], Geom.density(), Guide.xlabel("σ₁")) @@ -419,7 +417,6 @@ Plot(...) ```` -![](./assets//MultipleTerms_18_1.svg) ````julia julia> plot(x = pstsbstp[:σ₂], Geom.density(), Guide.xlabel("σ₂")) @@ -428,7 +425,6 @@ Plot(...) ```` -![](./assets//MultipleTerms_19_1.svg) @@ -442,11 +438,10 @@ Plot(...) ```` -![](./assets//MultipleTerms_20_1.svg) ````julia julia> count(x -> x < 1.0e-5, pstsbstp[:σ₂]) -3665 +3661 ```` @@ -498,9 +493,9 @@ Linear mixed model fit by maximum likelihood -124.20085 248.40170 254.40170 260.68473 Variance components: - Column Variance Std.Dev. - G (Intercept) 9.6328208 3.1036786 - Residual 0.6780001 0.8234076 + Column Variance Std.Dev. + G (Intercept) 9.63282135 3.1036787 + Residual 0.67800006 0.8234076 Number of obs: 60; levels of grouping factors: 30 Fixed-effects parameters: @@ -540,7 +535,7 @@ A bootstrap sample ````julia julia> @time psts1bstp = bootstrap(10000, pstsm1); - 2.722676 seconds (6.08 M allocations: 285.117 MiB, 3.40% gc time) + 3.810911 seconds (6.16 M allocations: 275.817 MiB, 2.57% gc time) ```` @@ -634,24 +629,24 @@ it is sufficiently diffuse to warrant treating it as if it were a continuous res At this point we will fit models that have random effects for student, instructor, and department (or the combination) to these data. In the next chapter we will fit models incorporating fixed-effects for instructor and department to these data. 
````julia -julia> @time instm = fit!(lmm(@formula(Y ~ 1 + A + (1|G) + (1|H) + (1|I)), dat[:InstEval])) - 2.632963 seconds (28.59 k allocations: 188.855 MiB, 1.23% gc time) +julia> @time instm = fit(LinearMixedModel, @formula(Y ~ 1 + A + (1|G) + (1|H) + (1|I)), dat[:InstEval]) + 2.183438 seconds (342.65 k allocations: 200.968 MiB, 1.08% gc time) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + A + (1 | G) + (1 | H) + (1 | I) logLik -2 logLik AIC BIC -1.18860884×10⁵ 2.37721769×10⁵ 2.37733769×10⁵ 2.37788993×10⁵ Variance components: - Column Variance Std.Dev. - G (Intercept) 0.1059726808 0.32553445 - H (Intercept) 0.2652041233 0.51497973 - I (Intercept) 0.0061677025 0.07853472 - Residual 1.3864886097 1.17749251 + Column Variance Std.Dev. + G (Intercept) 0.1059725479 0.325534250 + H (Intercept) 0.2652049098 0.514980495 + I (Intercept) 0.0061678670 0.078535769 + Residual 1.3864885942 1.177492503 Number of obs: 73421; levels of grouping factors: 2972, 1128, 14 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 3.28258 0.0284114 115.537 <1e-99 +(Intercept) 3.28258 0.0284116 115.537 <1e-99 A: 1 -0.0925886 0.0133832 -6.91828 <1e-11 diff --git a/docs/src/SimpleLMM.md b/docs/src/SimpleLMM.md index 1b7ecaf0b..7b5d9f2a6 100644 --- a/docs/src/SimpleLMM.md +++ b/docs/src/SimpleLMM.md @@ -69,14 +69,14 @@ The data are described in Davies (), the fourth edition of the book mentioned ab First attach the packages to be used ````julia -julia> using DataFrames, Distributions, Gadfly, GLM, MixedModels, RData +julia> using DataFrames, Distributions, Gadfly, GLM, MixedModels, RData, RCall ```` -and allow for unqualified names for some graphics functions. +and allow for unqualified names for some graphics functions ````julia julia> using Gadfly.Geom: point, line, histogram, density, vline @@ -97,10 +97,23 @@ julia> const dat = convert(Dict{Symbol,DataFrame}, load(Pkg.dir("MixedModels", " julia> dyestuff = dat[:Dyestuff]; -julia> dump(dyestuff) -DataFrames.DataFrame 30 observations of 2 variables - G: DataArrays.PooledDataArray{String,UInt8,1}(30) String["A", "A", "A", "A"] - Y: DataArrays.DataArray{Float64,1}(30) [1545.0, 1440.0, 1440.0, 1520.0] +julia> describe(dyestuff) +G +Summary Stats: +Length: 30 +Type: CategoricalArrays.CategoricalString{UInt8} +Number Unique: 6 + +Y +Summary Stats: +Mean: 1527.500000 +Minimum: 1440.000000 +1st Quartile: 1468.750000 +Median: 1530.000000 +3rd Quartile: 1575.000000 +Maximum: 1635.000000 +Length: 30 +Type: Float64 ```` @@ -121,11 +134,11 @@ In a plot, however, the order of the levels influences the perception of the pat Rather than providing an arbitrary pattern it is best to order the levels according to some criterion for the plot. In this case a good choice is to order the batches by increasing mean yield, which can be easily done in R. -(Note: at present this plot fails because of the ongoing DataFrames conversion.) - ````julia -julia> #dyestuff = rcopy("within(Dyestuff, Batch <- reorder(Batch, Yield, mean))"); -#plot(dyestuff, x="Y", y="G", point, xlabel("Yield of dyestuff (g)")) +julia> dyestuffR = rcopy(R"within(lme4::Dyestuff, Batch <- reorder(Batch, Yield, mean))"); + +julia> plot(dyestuffR, x = :Yield, y = :Batch, point, xlabel("Yield of dyestuff (g)"), ylabel("Batch")) +Plot(...) 
```` @@ -150,10 +163,23 @@ The structure and summary are intentionally similar to those of the `Dyestuff` d ````julia julia> dyestuff2 = dat[:Dyestuff2]; -julia> dump(dyestuff2) -DataFrames.DataFrame 30 observations of 2 variables - G: DataArrays.PooledDataArray{String,UInt8,1}(30) String["A", "A", "A", "A"] - Y: DataArrays.DataArray{Float64,1}(30) [7.298, 3.846, 2.434, 9.566] +julia> describe(dyestuff2) +G +Summary Stats: +Length: 30 +Type: CategoricalArrays.CategoricalString{UInt8} +Number Unique: 6 + +Y +Summary Stats: +Mean: 5.665600 +Minimum: -0.892000 +1st Quartile: 2.765000 +Median: 5.365000 +3rd Quartile: 8.151000 +Maximum: 13.434000 +Length: 30 +Type: Float64 ```` @@ -181,7 +207,7 @@ The structure of the formula will be explained after showing the example. A model allowing for an overall level of the `Yield` and for an additive random effect for each level of `Batch` can be fit as ````julia -julia> mm1 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff)) +julia> mm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1 | G)), dyestuff) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) logLik -2 logLik AIC BIC @@ -189,7 +215,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - G (Intercept) 1388.3333 37.260345 + G (Intercept) 1388.3332 37.260344 Residual 2451.2500 49.510100 Number of obs: 30; levels of grouping factors: 6 @@ -244,7 +270,7 @@ The standard error of the intercept estimate is 17.69 g. Fitting a similar model to the `dyestuff2` data produces an estimate $\widehat{\sigma_1^2}=0$. ````julia -julia> mm2 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff2)) +julia> mm2 = fit(LinearMixedModel, @formula(Y ~ 1 + (1 | G)), dyestuff2) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) logLik -2 logLik AIC BIC @@ -284,7 +310,7 @@ Even when the final fitted model is not singular, we must allow for such models It happens that this model corresponds to the linear model (i.e. a model with fixed-effects only) ````julia julia> lm1 = lm(@formula(Y ~ 1), dyestuff2) -DataFrames.DataFrameRegressionModel{GLM.LinearModel{GLM.LmResp{Array{Float64,1}},GLM.DensePredChol{Float64,Base.LinAlg.Cholesky{Float64,Array{Float64,2}}}},Array{Float64,2}} +StatsModels.DataFrameRegressionModel{GLM.LinearModel{GLM.LmResp{Array{Float64,1}},GLM.DensePredChol{Float64,Base.LinAlg.Cholesky{Float64,Array{Float64,2}}}},Array{Float64,2}} Formula: Y ~ +1 @@ -431,7 +457,7 @@ For a linear mixed model, where all the conditional and unconditional distributi The optional second argument, `verbose`, in a call to `fit!` of a `LinearMixedModel` object produces output showing the progress of the iterative optimization of $\tilde{d}(\bf\theta|\bf y)$. 
````julia -julia> mm1 = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dyestuff), true); +julia> mm1 = fit!(LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dyestuff), true); f_1: 327.76702 [1.0] f_2: 331.03619 [1.75] f_3: 330.64583 [0.25] @@ -464,7 +490,7 @@ Whether or not verbose output is requested, the `optsum` field of a `LinearMixed ````julia julia> mm1.optsum Initial parameter vector: [1.0] -Initial objective value: 327.76702162461663 +Initial objective value: 327.7670216246183 Optimizer (from NLopt): LN_BOBYQA Lower bounds: [0.0] @@ -477,7 +503,7 @@ maxfeval: -1 Function evaluations: 18 Final parameter vector: [0.752581] -Final objective value: 327.3270598811344 +Final objective value: 327.3270598811364 Return code: FTOL_REACHED @@ -490,15 +516,8 @@ Return code: FTOL_REACHED The full list of fields in a `LinearMixedModel` object is ````julia -julia> fieldnames(LinearMixedModel) -6-element Array{Symbol,1}: - :formula - :trms - :sqrtwts - :A - :L - :optsum - +julia> showcompact(fieldnames(LinearMixedModel)) +Symbol[:formula, :trms, :sqrtwts, :A, :L, :optsum] ```` @@ -551,7 +570,7 @@ The last two elements are $\bf X$, the $n\times p$ model matrix for the fixed-ef ````julia julia> mm1.trms[end - 1] -MixedModels.MatrixTerm{Float64,Array{Float64,2}}([1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0], [1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0; 1.0], String["(Intercept)"]) +MixedModels.MatrixTerm{Float64,Array{Float64,2}}([1.0; 1.0; … ; 1.0; 1.0], [1.0; 1.0; … ; 1.0; 1.0], [1], 1, String["(Intercept)"]) ```` @@ -559,7 +578,7 @@ MixedModels.MatrixTerm{Float64,Array{Float64,2}}([1.0; 1.0; 1.0; 1.0; 1.0; 1.0; ````julia julia> mm1.trms[end] -MixedModels.MatrixTerm{Float64,Array{Float64,2}}([1545.0; 1440.0; 1440.0; 1520.0; 1580.0; 1540.0; 1555.0; 1490.0; 1560.0; 1495.0; 1595.0; 1550.0; 1605.0; 1510.0; 1560.0; 1445.0; 1440.0; 1595.0; 1465.0; 1545.0; 1595.0; 1630.0; 1515.0; 1635.0; 1625.0; 1520.0; 1455.0; 1450.0; 1480.0; 1445.0], [1545.0; 1440.0; 1440.0; 1520.0; 1580.0; 1540.0; 1555.0; 1490.0; 1560.0; 1495.0; 1595.0; 1550.0; 1605.0; 1510.0; 1560.0; 1445.0; 1440.0; 1595.0; 1465.0; 1545.0; 1595.0; 1630.0; 1515.0; 1635.0; 1625.0; 1520.0; 1455.0; 1450.0; 1480.0; 1445.0], String[""]) +MixedModels.MatrixTerm{Float64,Array{Float64,2}}([1545.0; 1440.0; … ; 1480.0; 1445.0], [1545.0; 1440.0; … ; 1480.0; 1445.0], [1], 0, String[""]) ```` @@ -571,7 +590,7 @@ The elements of `trms` before the last two represent vertical sections of $\bf Z ````julia julia> mm1.trms[1] -MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(String["A", "A", "A", "A", "A", "B", "B", "B", "B", "B", "C", "C", "C", "C", "C", "D", "D", "D", "D", "D", "E", "E", "E", "E", "E", "F", "F", "F", "F", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 0.7525806752871207) +MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["A", "A", "A", "A", "A", "B", "B", "B", "B", "B" … "E", "E", "E", "E", "E", "F", "F", "F", "F", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 0.7525806571450242) ```` @@ -584,7 +603,7 @@ In small examples the structure is more obvious when the `ScalarReMat` is conver ````julia julia> sparse(mm1.trms[1]) -30×6 SparseMatrixCSC{Float64,Int64} with 30 stored entries: +30×6 SparseMatrixCSC{Float64,Int32} with 30 stored entries: [1 , 1] = 1.0 [2 , 1] = 1.0 [3 , 1] = 1.0 @@ -592,21 +611,7 @@ julia> sparse(mm1.trms[1]) [5 , 1] = 1.0 [6 , 2] = 1.0 [7 , 2] = 1.0 - [8 , 2] = 1.0 - [9 , 2] = 1.0 - [10, 2] = 1.0 - [11, 3] = 1.0 - [12, 3] = 1.0 - [13, 3] = 1.0 - [14, 3] = 1.0 - [15, 3] = 1.0 - [16, 4] = 1.0 - [17, 4] = 1.0 - [18, 4] = 1.0 - [19, 4] = 1.0 - [20, 4] = 1.0 - [21, 5] = 1.0 - [22, 5] = 1.0 + ⋮ [23, 5] = 1.0 [24, 5] = 1.0 [25, 5] = 1.0 @@ -633,17 +638,7 @@ julia> full(mm1.trms[1]) 0.0 1.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 + ⋮ ⋮ 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 @@ -762,14 +757,14 @@ The `L` field is a blocked matrix like the `A` field containing the upper Choles ````julia julia> mm1.L 8×8 LowerTriangular{Float64,BlockArrays.BlockArray{Float64,2,AbstractArray{Float64,2}}}: - 1.95752 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - 0.0 1.95752 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - 0.0 0.0 1.95752 ⋅ ⋅ ⋅ ⋅ ⋅ - 0.0 0.0 0.0 1.95752 ⋅ ⋅ ⋅ ⋅ - 0.0 0.0 0.0 0.0 1.95752 ⋅ ⋅ ⋅ - 0.0 0.0 0.0 0.0 0.0 1.95752 ⋅ ⋅ - 1.92228 1.92228 1.92228 1.92228 1.92228 1.92228 2.79804 ⋅ - 2893.03 2937.24 3006.45 2879.58 3075.65 2825.75 4274.01 271.178 + 1.95752 ⋅ ⋅ … ⋅ ⋅ ⋅ + 0.0 1.95752 ⋅ ⋅ ⋅ ⋅ + 0.0 0.0 1.95752 ⋅ ⋅ ⋅ + 0.0 0.0 0.0 ⋅ ⋅ ⋅ + 0.0 0.0 0.0 ⋅ ⋅ ⋅ + 0.0 0.0 0.0 … 1.95752 ⋅ ⋅ + 1.92228 1.92228 1.92228 1.92228 2.79804 ⋅ + 2893.03 2937.24 3006.45 2825.75 4274.01 271.178 ```` @@ -840,7 +835,7 @@ All the information needed to evaluate the profiled log-likelihood is available ````julia julia> 2 * sum(log.(diag(mm1.L.data[Block(1,1)]))) -8.060146362820694 +8.060146149034855 ```` @@ -864,7 +859,7 @@ The penalized residual sum of squares is the square of the single element of the ````julia julia> abs2(mm1.L.data[Block(3, 3)][1, 1]) -73537.50049200655 +73537.50101605429 ```` @@ -872,7 +867,7 @@ julia> abs2(mm1.L.data[Block(3, 3)][1, 1]) ````julia julia> pwrss(mm1) -73537.50049200655 +73537.50101605429 ```` @@ -884,7 +879,7 @@ The objective is ````julia julia> logdet(mm1) + nobs(mm1) * (1 + log(2π * pwrss(mm1) / nobs(mm1))) -327.3270598811344 +327.3270598811364 ```` @@ -903,17 +898,17 @@ First set the random number seed for reproducibility. ````julia julia> srand(1234321); -julia> mm1bstp = bootstrap(10000, mm1); +julia> mm1bstp = bootstrap(100000, mm1); julia> size(mm1bstp) -(10000, 5) +(100000, 5) ```` ````julia -julia> show(names(mm1bstp)) +julia> showcompact(names(mm1bstp)) Symbol[:obj, :σ, :β₁, :θ₁, :σ₁] ```` @@ -932,31 +927,28 @@ Finally, the extent to which the distribution of a sample can be approximated by The [`Gadfly`](https://github.com/GiovineItalia/Gadfly.jl) package for Julia uses a "grammar of graphics" specification, similar to the [`ggplot2`](http://ggplot2.org/) package for R. A histogram or a kernel density plot are describes as *geometries* and specified by `Geom.histogram` and `Geom.density`, respectively. 
````julia -julia> plot(mm1bstp, x = :β₁, Geom.histogram) +julia> plot(mm1bstp, x = :β₁, histogram) Plot(...) ```` -![](./assets//SimpleLMM_45_1.svg) ````julia -julia> plot(mm1bstp, x = :σ, Geom.histogram) +julia> plot(mm1bstp, x = :σ, histogram) Plot(...) ```` -![](./assets//SimpleLMM_46_1.svg) ````julia -julia> plot(mm1bstp, x = :σ₁, Geom.histogram) +julia> plot(mm1bstp, x = :σ₁, histogram) Plot(...) ```` -![](./assets//SimpleLMM_47_1.svg) @@ -966,7 +958,7 @@ The histogram of $\sigma_1^2$ has a "spike" at zero. Because the value of $\sig ````julia julia> length(mm1bstp[:θ₁]) - countnz(mm1bstp[:θ₁]) -941 +10090 ```` @@ -977,13 +969,12 @@ julia> length(mm1bstp[:θ₁]) - countnz(mm1bstp[:θ₁]) That is, nearly 1/10 of the `theta1` values are zeros. Because such a spike or pulse will be spread out or diffused in a kernel density plot, ````julia -julia> plot(mm1bstp, x = :θ₁, Geom.density) +julia> plot(mm1bstp, density, x = :θ₁) Plot(...) ```` -![](./assets//SimpleLMM_49_1.svg) @@ -992,22 +983,20 @@ such a plot is not suitable for a sample of a bounded parameter that includes va The density of the estimates of the other two parameters, $\beta_1$ and $\sigma$, are depicted well in kernel density plots. ````julia -julia> plot(mm1bstp, x = :β₁, Geom.density) +julia> plot(mm1bstp, density, x = :β₁) Plot(...) ```` -![](./assets//SimpleLMM_50_1.svg) ````julia -julia> plot(mm1bstp, x = :σ, Geom.density) +julia> plot(mm1bstp, density, x = :σ) Plot(...) ```` -![](./assets//SimpleLMM_51_1.svg) @@ -1042,6 +1031,7 @@ The kernel density estimate of $\sigma$ is more symmetric + and the normal probability plot of $\sigma$ is also reasonably straight. ![](./assets//SimpleLMM_54_1.svg) @@ -1078,8 +1068,8 @@ One possible interval containing 95% of the sample is $(\sigma_{[1]}, \sigma_{[9 ````julia julia> sigma95 = quantile(mm1bstp[:σ], [0.025, 0.975]) 2-element Array{Float64,1}: - 35.3694 - 62.9763 + 35.5837 + 63.099 ```` @@ -1145,8 +1135,8 @@ For example, the 95% HPD interval calculated from the sample of $\beta_1$ values ````julia julia> hpdinterval(mm1bstp[:β₁]) 2-element Array{Float64,1}: - 1492.49 - 1561.32 + 1493.01 + 1562.08 ```` @@ -1159,8 +1149,8 @@ which is very close to the central probability interval of ````julia julia> quantile(mm1bstp[:β₁], [0.025, 0.975]) 2-element Array{Float64,1}: - 1492.45 - 1561.28 + 1492.85 + 1561.92 ```` @@ -1175,8 +1165,8 @@ The HPD interval on $\sigma^2$ is ````julia julia> hpdinterval(abs2.(mm1bstp[:σ])) 2-element Array{Float64,1}: - 1068.03 - 3745.88 + 1162.81 + 3834.32 ```` @@ -1189,8 +1179,8 @@ which is shifted to the left relative to the central probability interval ````julia julia> quantile(abs2.(mm1bstp[:σ]), [0.025, 0.975]) 2-element Array{Float64,1}: - 1250.99 - 3966.02 + 1266.2 + 3981.48 ```` @@ -1205,8 +1195,8 @@ The HPD interval does not have the property that the endpoints of the interval o ````julia julia> sigma95hpd = hpdinterval(mm1bstp[:σ]) 2-element Array{Float64,1}: - 35.4844 - 63.0209 + 35.4254 + 62.8875 ```` @@ -1215,8 +1205,8 @@ julia> sigma95hpd = hpdinterval(mm1bstp[:σ]) ````julia julia> abs2.(sigma95hpd) 2-element Array{Float64,1}: - 1259.14 - 3971.64 + 1254.96 + 3954.84 ```` @@ -1230,7 +1220,7 @@ Finally, a 95% HPD interval on $\sigma_1$ includes the boundary value $\sigma_1= julia> hpdinterval(mm1bstp[:σ₁]) 2-element Array{Float64,1}: 0.0 - 54.7193 + 54.5986 ```` @@ -1243,8 +1233,8 @@ In fact, the confidence level or coverage probability must be rather small befor ````julia julia> hpdinterval(mm1bstp[:σ₁], 0.798) 2-element Array{Float64,1}: - 
9.83921 - 52.2513 + 0.0 + 42.3371 ```` @@ -1253,8 +1243,8 @@ julia> hpdinterval(mm1bstp[:σ₁], 0.798) ````julia julia> hpdinterval(mm1bstp[:σ₁], 0.799) 2-element Array{Float64,1}: - 0.0 - 42.525 + 0.0 + 42.3943 ```` @@ -1298,7 +1288,7 @@ The `ranef` extractor returns the conditional modes. ````julia julia> ranef(mm1) # FIXME return an ordered dict 1-element Array{Array{Float64,2},1}: - [-16.6282 0.369516 26.9747 -21.8014 53.5798 -42.4943] + [-16.6282 0.369516 … 53.5798 -42.4943] ```` diff --git a/docs/src/SingularCovariance.md b/docs/src/SingularCovariance.md index 6d7fc2a1e..a234d5e3d 100644 --- a/docs/src/SingularCovariance.md +++ b/docs/src/SingularCovariance.md @@ -33,7 +33,7 @@ As is customary (though not required) in Julia, a function whose name ends in `! An optional second argument of `true` in the call to `fit!` produces verbose output from the optimization. ````julia -julia> sleepm = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true) +julia> sleepm = fit!(LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true) f_1: 1784.6423 [1.0, 0.0, 1.0] f_2: 1790.12564 [1.75, 0.0, 1.0] f_3: 1798.99962 [1.0, 1.0, 1.0] @@ -87,7 +87,7 @@ f_50: 1751.93934 [0.929191, 0.0181658, 0.222643] f_51: 1751.93935 [0.929254, 0.0182093, 0.222621] f_52: 1751.93935 [0.929189, 0.0181298, 0.222573] f_53: 1751.93934 [0.929254, 0.0181676, 0.22265] -f_54: 1751.93934 [0.929214, 0.0181717, 0.222647] +f_54: 1751.93934 [0.929215, 0.0181717, 0.222647] f_55: 1751.93934 [0.929208, 0.0181715, 0.222646] f_56: 1751.93934 [0.929209, 0.018173, 0.222652] f_57: 1751.93934 [0.929221, 0.0181684, 0.222645] @@ -120,8 +120,8 @@ The corresponding parameter vector is called $\theta$. ````julia julia> Λ = getΛ(sleepm)[1] -2×2 Array{Float64,2}: - 0.929221 0.0 +2×2 LowerTriangular{Float64,Array{Float64,2}}: + 0.929221 ⋅ 0.0181684 0.222645 ```` @@ -140,7 +140,7 @@ In terms of the estimates, ````julia julia> s² = varest(sleepm) # estimate of the residual variance -654.9414511016189 +654.941450830681 ```` @@ -163,7 +163,7 @@ Writing out the expressions for the elements of the covariance matrix in terms o ````julia julia> Λ[2, 1] / sqrt(Λ[2, 1]^2 + Λ[2, 2]^2) -0.08133212358418719 +0.08133214602351191 ```` @@ -179,7 +179,7 @@ Thus the estimated correlation can be written ````julia julia> Λ[2, 1] / norm(view(Λ, 2, :)) -0.08133212358418719 +0.08133214602351191 ```` @@ -213,7 +213,7 @@ Some details on the optimization process are available in an `OptSummary` object ````julia julia> sleepm.optsum Initial parameter vector: [1.0, 0.0, 1.0] -Initial objective value: 1784.6422961924686 +Initial objective value: 1784.6422961924507 Optimizer (from NLopt): LN_BOBYQA Lower bounds: [0.0, -Inf, 0.0] @@ -226,7 +226,7 @@ maxfeval: -1 Function evaluations: 57 Final parameter vector: [0.929221, 0.0181684, 0.222645] -Final objective value: 1751.9393444647078 +Final objective value: 1751.9393444646757 Return code: FTOL_REACHED @@ -325,7 +325,7 @@ julia> freqtable(issmall.(sleepmbstrp[:θ₁]), issmall.(sleepmbstrp[:θ₃])) 2×2 Named Array{Int64,2} Dim1 ╲ Dim2 │ false true ────────────┼───────────── -false │ 9685 308 +false │ 9684 309 true │ 7 0 ```` @@ -392,7 +392,7 @@ $\kappa^{-1}$ is small if either or both of $\theta_1$ or $\theta_3$ is small. 
````julia julia> sum(issmall, rc) -315 +316 ```` @@ -414,7 +414,7 @@ julia> sum(isfinite, sleepmbstrp[:ρ₁]) # recall that ρ = NaN in 7 cases ````julia julia> sum(x -> x == -1, sleepmbstrp[:ρ₁]) # number of cases of rho == -1 -2 +1 ```` @@ -422,7 +422,7 @@ julia> sum(x -> x == -1, sleepmbstrp[:ρ₁]) # number of cases of rho == -1 ````julia julia> sum(x -> x == +1, sleepmbstrp[:ρ₁]) # number of cases of rho == +1 -306 +308 ```` @@ -435,9 +435,8 @@ That is, the values of $\theta_2$ were definitely negative. ````julia julia> sleepmbstrp[:θ₂][find(x -> x == -1, sleepmbstrp[:ρ₁])] -2-element Array{Float64,1}: - -0.265855 - -0.254484 +1-element Array{Float64,1}: + -0.254495 ```` @@ -477,23 +476,23 @@ Symbol[:Subject, :age, :height, :Occasion] ````julia -julia> oxboysm = fit!(lmm(@formula(height ~ 1 + age + (1+age | Subject)), oxboys)) +julia> oxboysm = fit(LinearMixedModel, @formula(height ~ 1 + age + (1+age | Subject)), oxboys) Linear mixed model fit by maximum likelihood Formula: height ~ 1 + age + ((1 + age) | Subject) logLik -2 logLik AIC BIC -362.98384 725.96769 737.96769 758.69962 Variance components: - Column Variance Std.Dev. Corr. - Subject (Intercept) 62.78847365 7.92391782 - age 2.71167720 1.64671710 0.64 - Residual 0.43545514 0.65989024 + Column Variance Std.Dev. Corr. + Subject (Intercept) 62.7890195 7.92395226 + age 2.7117909 1.64675163 0.64 + Residual 0.4354541 0.65988947 Number of obs: 234; levels of grouping factors: 26 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 149.372 1.55461 96.0833 <1e-99 -age 6.52547 0.329768 19.7881 <1e-86 +(Intercept) 149.372 1.55461 96.0828 <1e-99 +age 6.52547 0.329774 19.7877 <1e-86 ```` @@ -502,7 +501,7 @@ age 6.52547 0.329768 19.7881 <1e-86 ````julia julia> show(getθ(oxboysm)) -[12.0079, 1.60161, 1.91366] +[12.008, 1.60168, 1.91367] ```` @@ -544,7 +543,7 @@ The empirical density of the correlation estimates shows that even in this case ````julia julia> extrema(oxboysmbtstrp[:ρ₁]) -(-0.048724310572162526, 0.9352854720073266) +(-0.04869178624968976, 0.9352905798612754) ```` @@ -558,7 +557,7 @@ The reciprocal condition number julia> rc = recipcond(oxboysmbtstrp); julia> extrema(rc) -(0.06152489984238238, 0.36869107666318435) +(0.06152149310502919, 0.3686950157128226) ```` @@ -597,16 +596,16 @@ Linear mixed model fit by maximum likelihood -67.25463 134.50927 146.50927 157.21441 Variance components: - Column Variance Std.Dev. Corr. - Subject (Intercept) 2.97201061 1.72395203 - age 0.02152085 0.14669986 -0.30 - Residual 0.44656043 0.66825177 - Number of obs: 44; levels of grouping factors: 11 + Column Variance Std.Dev. Corr. 
+ Subject (Intercept) 2.971189195 1.72371378 + age 0.021509463 0.14666105 -0.30 + Residual 0.446597842 0.66827976 + Number of obs: 44; levels of grouping factors: 27 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 17.3727 0.725217 23.9552 <1e-99 -age 0.479545 0.0631368 7.59533 <1e-13 +(Intercept) 17.3727 0.725181 23.9564 <1e-99 +age 0.479545 0.06313 7.59616 <1e-13 ```` @@ -615,7 +614,7 @@ age 0.479545 0.0631368 7.59533 <1e-13 ````julia julia> srand(1234123) -MersenneTwister(UInt32[0x0012d4cb], Base.dSFMT.DSFMT_state(Int32[1849428804, 1072710534, 1722234079, 1073299110, 2058053067, 1072801015, 18044541, 1072957251, 668716466, 1073001711, 1294148366, 1073392756, -246176162, 1073384180, 1439312554, 1073174261, 59840039, 1072796106, -1045579541, 1073468618, 388453685, 1073735140, -1275427225, 1072814875, -1971017192, 1072928173, -46910975, 1073472366, 860582882, 1073252538, 1154181238, 1073295647, -619955804, 1072833188, -1324140313, 1073322743, -1114678961, 1073257910, -1799686755, 1073464531, 276417507, 1073419406, 1970196725, 1073308789, -13922601, 1073313708, 640462978, 1073086920, 1821228270, 1073662023, 1531855689, 1072742242, -1437706174, 1073735270, -1006506637, 1072741588, 1732322399, 1072760605, -1923004190, 1072885547, -1622002665, 1073684278, 1022696057, 1072984566, -1521755664, 1073171884, 1273712108, 1073144109, 678390109, 1073200698, 2068056517, 1073357621, 210620177, 1073475212, 488577462, 1073611580, -1963669263, 1072724838, 1344926338, 1073220633, -2128026924, 1073025422, -920440580, 1072784275, -1091866162, 1073473678, -59369667, 1073554514, -2071907959, 1073025925, -1246947456, 1072920630, -1670610045, 1073254614, -1052964581, 1073455201, -1791712339, 1073535443, -704243129, 1073671224, -506703901, 1072889748, -1310651959, 1073169973, -537425724, 1073283538, 1793991520, 1073642067, -973437862, 1072747472, 1232110785, 1073611144, -1914344300, 1073038604, 759504088, 1073209170, -1976196993, 1072824746, 163828606, 1073250587, 2060443751, 1073334027, -1885928194, 1072757818, -1493065553, 1073557997, -832397309, 1073454123, 1467839181, 1073466932, 319269955, 1073693486, 1325787130, 1073523241, -896041432, 1072893329, -1878247912, 1072874719, 1190522197, 1072858280, -121108225, 1073117078, -16965821, 1073043162, -809336891, 1073273048, 1607485827, 1073643584, -1350170179, 1073431809, -1510996351, 1072752005, 1353691010, 1072846043, -1129116352, 1072755857, 1706625622, 1073084338, -243809912, 1073203323, -1119406567, 1072877086, -965616213, 1073162341, 1296869492, 1073023599, 884757660, 1073436321, 324346295, 1072746761, 1646872759, 1073453656, 1659590315, 1072993205, 1848094888, 1073608742, -1389293461, 1072930572, -842024947, 1073208906, 424909026, 1072894317, -1942412030, 1073663928, -1161059380, 1073353002, -1315723161, 1073132351, 2146816118, 1073481050, 1699536517, 1072856916, -647709167, 1072745038, 781257507, 1072962005, 558844032, 1072794643, -1827676894, 1072970089, 1391216679, 1073061922, 1468938343, 1073507066, -50573609, 1072965147, 550534557, 1073041040, 1111331492, 1073272372, 1854735545, 1073415603, 1481741113, 1072996097, -84629236, 1073496481, 2108607834, 1073652638, 1450897993, 1072705903, 22081081, 1072773460, -792905833, 1073321136, -1241866505, 1073266969, 1572339858, 1073553347, -544093909, 1072864034, -905361989, 1072994198, 597868072, 1072858235, -148690660, 1072726596, 2092121974, 1073392702, 1001020932, 1073359943, -664205090, 1073635439, 798487126, 1073739043, 95108301, 1073017923, -2102978354, 1073084683, 460916246, 
1072970867, 1301312504, 1072884577, 1181661833, 1072703029, -1438526844, 1072836215, 1357881049, 1073238811, 407395602, 1072959641, -563671799, 1073328832, 1574312152, 1072868062, -1168474889, 1072956406, -1507947262, 1073337316, -1177970445, 1073075252, -1950883307, 1073730687, 1808826949, 1073712051, -673307179, 1073086883, 1234568777, 1073669206, 1086777039, 1072738451, 1176717639, 1073569439, -1108603235, 1073381359, -302636537, 1073410600, -1258968951, 1073366825, -1847183597, 1073005725, -820495507, 1073032147, -539222979, 1072797912, -1129119277, 1073126019, 29819990, 1073296701, -289856999, 1073118270, 1602752003, 1072898571, -436188404, 1073540928, -1736482116, 1073273284, 1183979036, 1073053117, 1407863340, 1073249816, 1515988008, 1073412855, 1427635406, 1073613554, -1552911431, 1073711931, -457593369, 1073395042, 583376543, 1072858985, 120403097, 1073336895, -546791565, 1072766143, -1945250252, 1073447776, 2092782457, 1073720655, -1361962500, 1073635827, 1098177616, 1072958526, 635047031, 1073130602, -2036676095, 1073586385, -1447984508, 1072822028, -1860097375, 1073668999, -734215219, 1073123323, 203113175, 1073423649, 12118255, 1072996726, -1847134656, 1073293818, -346685271, 1072728058, 1948116565, 1073720651, -1214839800, 1072949179, 1051019791, 1073646630, -1430462444, 1072698994, 683397666, 1073198427, 939105378, 1073371092, 1946538405, 1073672877, -131298527, 1073302859, -2006906568, 1073439839, 406137672, 1073273025, 323668036, 1073725260, -1520237861, 1073730982, 1630848572, 1073337302, 1908852154, 1073331846, 1757852111, 1073484221, 907450426, 1073250426, -429586529, 1073356068, 28909651, 1073285139, -1005714440, 1073633989, 735528808, 1073466649, -1287098702, 1073619338, -1999098996, 1073150981, 1742118585, 1073226201, 1056909257, 1073499160, 412542419, 1073609260, 1987033414, 1073426151, 373301362, 1073464813, -2072480688, 1073435334, -1275681021, 1072748215, 1979340606, 1072844123, 196469936, 1072761978, 250282766, 1072900592, 1046193758, 1073042979, 688446650, 1073676901, 484492819, 1073565985, 545232556, 1073062928, 1401725408, 1073481360, 1786249255, 1072767542, 342133395, 1073012367, 764190343, 1072838025, 834188368, 1073514097, 293469320, 1073320924, -129203618, 1072869113, -426121097, 1073393475, 443358151, 1073606557, 1057390546, 1073557435, -865292617, 1072993798, 165383692, 1073606073, 1896949958, 1073397960, 879983282, 1073707666, 536638657, 1073155831, -300176276, 1073113112, -400371866, 1073130583, -489576627, 1073044960, -1951713158, 1072917492, -1286700939, 1073478481, -783793209, 1073058458, -506161217, 1073421779, -1890992377, 1072746360, -1115406913, 1073614546, -162887009, 1073650333, 613307192, 1073504939, -304427970, 1073547012, -1940510584, 1072816758, -815665977, 1073287216, -2016029525, 1072754541, 1762115438, 1072974567, 717520751, 1073322548, -680193101, 1073004733, 34262767, 1073536000, 1400792921, 1073025746, -1842573561, 1073622161, -2124608432, 1072887936, 1075593791, 1073388138, -140230141, 1073354549, -1159659754, 1073645587, -863858741, 1073644778, 2134298558, 1073300168, -269344418, 1072821644, 1873828269, 1072792826, 1775543533, 1072947554, 586115388, 1072854654, 1436908889, 1072883723, -1533449616, 1072913043, 1124041442, 1072770958, -1621716787, 1073660676, -1242581393, 1073319921, -2101113220, 1072764611, 1897101736, 1073069969, -2051615881, 1072972945, -1139078463, 1072998784, -1325332950, 1072886743, -1112830824, 1073203480, 1947115707, 1073736193, 1682907517, 1073563739, 290460745, 1073097143, 85772401, 1072776213, -695753600, 
1073729211, -1369847030, 1073684013, -1910559124, 1072979931, 1292463358, 1073595166, -269306072, 1073700601, 626399620, 1072699381, -1520057283, 1073322071, 792094481, 1073553556, -952075680, 1073670157, -518022544, 1073113445, 262171394, 1072967794, -401181589, 1073325461, 1897911001, 1073156198, 808299321, 1073238034, -1782147221, 1073046155, -1446909839, 1073134391, 886080174, 1073707227, -1108449298, 1072757675, 1916674698, 1073733168, -993331321, 1072794479, -73077411, 1073384707, -215330224, 1073662090, -1059500391, 1072776564, -991963568, 1072977065, -337793736, 1073707187, -635009394, 1073717248, -158432757, 1072838042, 524172367, 1073633605, -844798861, 1073721735, -74603517, 1073314254, -1792052474, 1073357247, -1665833071, 1073380480, -1320849645, 1073631090, 1600122228, 1073062140, 287155168, 1073446461, 1413166997, 1073632239, -1654370157, 1073564114, -1444845157, 1073208110, 2141164054, 1073616196, -1835755974, 1073563243, 36069359, 1072918616, -840815020, 1073711493, 1454018272, 1073136051, 551271058, 1073399877, -1809923840, 1073041572, 189180410, 1073256860, 995460455, 1073020843, -598226223, 1073361044, -1337535558, 1073546973, -1402851934, 1073052123, -1522889678, 1073158231, -1686951621, 1073000689, 38560395, 1073065622, -2123206258, 1072929243, 293987068, 1073714036, -783742036, 1073181345, -251423067, 1073222902, 1438164842, 1073479914, 653202388, 1072947503, 1576890362, 1073145194, -2127091864, 1073578927, 813994073, 1072811723, -538524903, 1073394838, -850020419, 1073345584, 923056093, 1073182638, -1712988658, 1072954406, 660416299, 1072739015, 1515592894, 1073664970, 299474099, 1072933157, -1877373193, 1072824159, 1954615641, 1072985757, 639258178, 1073117106, -1835827910, 1073347886, -1133029688, 1072928231, 461762273, 1072801274, -529589171, 1073089766, -444679559, 1072950048, 396342591, 1073739438, 1461622329, 1073056155, 1864686526, 1073711443, 365455052, 1073331975, 468459086, 1073028730, 1941567735, 1073559901, -1956440732, 1073625817, 1095988623, 1073653617, 1169091901, 1073677684, -1414753292, 1073263574, -2109827946, 1073530397, -846504813, 1072748417, 1735247338, 1072725749, 518053453, 1073505452, -36067822, 1073212775, -343350984, 1073391452, -1820369291, 1072882278, 1297201948, 1072820954, 278667265, 1072730773, -1439793814, 1073094809, -657226778, 1073567866, 1480483272, 1073740640, 1172949624, 1073031749, -2087268881, 1073183471, -1632841116, 1072797806, -303045015, 1073654302, 979350334, 1072975010, -393602063, 1073070555, -1261194796, 1072711071, 321440136, 1073513463, -126423279, 1073051373, -1153221639, 1073553062, 1653158638, 1073411494, 780501209, -2117144994, -394908522, -1446490633, 382, 0]), [1.39169, 1.01459, 1.69258, 1.91239, 1.6891, 1.69523, 1.38824, 1.00049, 1.93803, 1.94006, 1.90659, 1.63501, 1.91715, 1.48091, 1.10972, 1.80163, 1.26269, 1.50306, 1.68199, 1.57417, 1.81124, 1.68689, 1.18151, 1.53163, 1.16967, 1.44419, 1.23956, 1.59694, 1.85841, 1.60524, 1.78611, 1.46478, 1.98423, 1.72108, 1.66429, 1.70762, 1.39824, 1.78871, 1.81995, 1.49487, 1.24669, 1.74591, 1.22775, 1.65195, 1.91041, 1.45178, 1.65043, 1.49824, 1.47671, 1.93257, 1.13513, 1.76829, 1.26258, 1.95549, 1.72951, 1.93126, 1.66836, 1.74831, 1.4289, 1.62828, 1.69934, 1.8498, 1.13332, 1.9979, 1.38677, 1.70394, 1.67686, 1.24935, 1.00772, 1.41226, 1.48639, 1.45156, 1.40043, 1.91376, 1.88813, 1.68674, 1.96842, 1.70133, 1.53853, 1.58489, 1.7293, 1.76468, 1.87832, 1.13767, 1.4087, 1.44116, 1.31574, 1.86853, 1.93417, 1.94502, 1.21288, 1.96916, 1.08907, 1.55288, 1.40131, 1.12543, 
1.21405, 1.67817, 1.17983, 1.99058, 1.99955, 1.021, 1.36419, 1.83445, 1.60462, 1.71884, 1.46441, 1.78621, 1.45319, 1.98843, 1.55662, 1.80779, 1.11781, 1.95475, 1.96495, 1.64982, 1.08662, 1.72398, 1.30742, 1.03123, 1.46261, 1.89265, 1.99604, 1.95782, 1.95518, 1.54616, 1.50369, 1.60303, 1.63555, 1.1163, 1.65027, 1.13288, 1.32219, 1.74818, 1.23475, 1.69259, 1.88469, 1.05621, 1.37915, 1.51552, 1.56632, 1.52667, 1.20825, 1.51445, 1.73014, 1.29619, 1.33702, 1.35274, 1.59769, 1.73476, 1.91596, 1.41436, 1.33519, 1.91299, 1.45369, 1.66721, 1.77278, 1.34307, 1.00324, 1.40304, 1.74047, 1.70785, 1.77585, 1.6332, 1.79741, 1.69388, 1.3171, 1.67221, 1.35684, 1.06447, 1.12468, 1.54196, 1.06154, 1.41146, 1.86812, 1.52971, 1.1563, 1.23706, 1.16389, 1.01507, 1.62896, 1.55833, 1.69745, 1.00809, 1.30821, 1.32629, 1.65988, 1.42734, 1.21396, 1.11986, 1.69339, 1.82858, 1.31863, 1.1525, 1.76046, 1.70297, 1.42043, 1.63997, 1.56172, 1.95448, 1.75838, 1.24174, 1.44877, 1.25432, 1.90751, 1.78323, 1.62425, 1.86824, 1.35815, 1.14457, 1.9352, 1.85055, 1.29918, 1.94123, 1.24999, 1.40297, 1.3838, 1.45346, 1.36219, 1.0115, 1.02739, 1.89383, 1.26001, 1.16968, 1.85881, 1.40997, 1.00037, 1.50492, 1.40556, 1.86905, 1.23646, 1.68106, 1.31574, 1.74962, 1.00291, 1.92415, 1.87567, 1.27393, 1.74007, 1.0389, 1.33037, 1.16264, 1.67845, 1.53356, 1.43217, 1.16943, 1.6584, 1.72777, 1.43911, 1.36428, 1.40679, 1.1439, 1.96602, 1.56063, 1.03341, 1.43778, 1.80941, 1.77579, 1.73572, 1.82685, 1.2145, 1.52935, 1.03663, 1.35444, 1.53689, 1.03897, 1.66977, 1.71988, 1.95437, 1.40077, 1.15403, 1.50026, 1.56632, 1.8615, 1.41536, 1.73983, 1.52685, 1.72446, 1.95235, 1.19034, 1.75043, 1.23639, 1.71512, 1.88732, 1.65388, 1.97635, 1.6025, 1.13761, 1.85906, 1.46835, 1.07552, 1.74679, 1.68907, 1.74825, 1.5992, 1.45209, 1.77065, 1.51303, 1.94757, 1.04173, 1.43018, 1.43237, 1.19421, 1.65903, 1.68714, 1.52056, 1.81748, 1.48228, 1.10787, 1.57301, 1.23938, 1.11674, 1.0279, 1.46869, 1.64583, 1.81857, 1.68066, 1.6342, 1.24168, 1.15932, 1.86415, 1.13741, 1.99294, 1.44182, 1.15146, 1.83543, 1.95767, 1.04504, 1.5624, 1.88916, 1.69743, 1.75566, 1.07714, 1.81709, 1.12531, 1.37952, 1.18183, 1.15131, 1.52327, 1.12375, 1.28035, 1.403, 1.47955, 1.53547, 1.76864, 1.15059, 1.43026, 1.59596, 1.41254, 1.26003, 1.69594, 1.73538, 1.92577, 1.41879, 1.51559, 1.61853, 1.07281, 1.67011, 1.44911, 1.58715, 1.17107, 1.72297, 1.19498, 1.57364, 1.39339, 1.57009, 1.08744, 1.16391, 1.32093, 1.30841, 1.43297, 1.47044, 1.97007, 1.85482, 1.55931, 1.20623, 1.79846, 1.70858, 1.79727, 1.41108, 1.47938, 1.55753], 382) +MersenneTwister(UInt32[0x0012d4cb], Base.dSFMT.DSFMT_state(Int32[1849428804, 1072710534, 1722234079, 1073299110, 2058053067, 1072801015, 18044541, 1072957251, 668716466, 1073001711 … -1153221639, 1073553062, 1653158638, 1073411494, 780501209, -2117144994, -394908522, -1446490633, 382, 0]), [1.39169, 1.01459, 1.69258, 1.91239, 1.6891, 1.69523, 1.38824, 1.00049, 1.93803, 1.94006 … 1.97007, 1.85482, 1.55931, 1.20623, 1.79846, 1.70858, 1.79727, 1.41108, 1.47938, 1.55753], 382) julia> orthfmbtstrp = bootstrap(10000, orthfm); @@ -628,8 +627,8 @@ julia> freqtable(issmall.(orthfmbtstrp[:θ₁]), issmall.(orthfmbtstrp[:θ₃])) 2×2 Named Array{Int64,2} Dim1 ╲ Dim2 │ false true ────────────┼───────────── -false │ 6855 3105 -true │ 39 1 +false │ 6773 3195 +true │ 32 0 ```` @@ -697,24 +696,24 @@ When the time origin is the beginning of the treatment there is not generally a ````julia julia> early = rcopy(R"subset(Early, select = c(cog, tos, id, trt, trttos))"); -julia> earlym = 
fit!(lmm(@formula(cog ~ 1 + tos + trttos + (1 + tos | id)), early)) +julia> earlym = fit(LinearMixedModel, @formula(cog ~ 1 + tos + trttos + (1 + tos | id)), early) Linear mixed model fit by maximum likelihood Formula: cog ~ 1 + tos + trttos + ((1 + tos) | id) logLik -2 logLik AIC BIC -1185.6369 2371.2738 2385.2738 2411.4072 Variance components: - Column Variance Std.Dev. Corr. - id (Intercept) 165.476124 12.863752 - tos 10.744608 3.277897 -1.00 - Residual 74.946764 8.657180 + Column Variance Std.Dev. Corr. + id (Intercept) 165.47659 12.8637705 + tos 10.74482 3.2779291 -1.00 + Residual 74.94683 8.6571838 Number of obs: 309; levels of grouping factors: 103 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 120.783 1.8178 66.4448 <1e-99 +(Intercept) 120.783 1.8178 66.4447 <1e-99 tos -22.474 1.4878 -15.1055 <1e-50 -trttos 7.65205 1.43609 5.3284 <1e-7 +trttos 7.65205 1.43609 5.32841 <1e-7 ```` @@ -729,7 +728,7 @@ The model converges to a singular covariance matrix for the random effects. julia> getθ(earlym) 3-element Array{Float64,1}: 1.48591 - -0.378633 + -0.378637 0.0 ```` diff --git a/docs/src/SubjectItem.md b/docs/src/SubjectItem.md index ba34d7e4f..4227f6d41 100644 --- a/docs/src/SubjectItem.md +++ b/docs/src/SubjectItem.md @@ -8,43 +8,43 @@ julia> const dat = convert(Dict{Symbol,DataFrame}, load(Pkg.dir("MixedModels", " ````julia -julia> mm1 = fit!(lmm(@formula(Y ~ 1+S+T+U+V+W+X+Z+(1+S+T+U+V+W+X+Z|G)+(1+S+T+U+V+W+X+Z|H)), dat[:kb07])) +julia> mm1 = fit(LinearMixedModel, @formula(Y ~ 1+S+T+U+V+W+X+Z+(1+S+T+U+V+W+X+Z|G)+(1+S+T+U+V+W+X+Z|H)), dat[:kb07]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + S + T + U + V + W + X + Z + ((1 + S + T + U + V + W + X + Z) | G) + ((1 + S + T + U + V + W + X + Z) | H) - logLik -2 logLik AIC BIC - -1.4293159×10⁴ 2.8586318×10⁴ 2.8748318×10⁴ 2.91930056×10⁴ + logLik -2 logLik AIC BIC + -1.42931611×10⁴ 2.85863221×10⁴ 2.87483221×10⁴ 2.91930097×10⁴ Variance components: Column Variance Std.Dev. Corr. 
- G (Intercept) 90715.0184 301.189340 - S 5180.3901 71.974927 -0.43 - T 5543.1348 74.452232 -0.47 0.08 - U 7584.8816 87.091226 0.21 -0.20 0.41 - V 8832.9843 93.983958 0.20 -0.76 -0.54 -0.20 - W 1821.9809 42.684668 0.47 -0.53 -0.11 -0.44 0.28 - X 7417.1453 86.122850 -0.10 0.13 -0.05 -0.86 -0.06 0.70 - Z 3801.0318 61.652509 -0.47 0.41 -0.39 -0.09 0.18 -0.78 -0.39 - H (Intercept) 129690.6871 360.125932 - S 1856.9765 43.092650 -0.34 - T 62370.7020 249.741270 -0.68 -0.45 - U 2950.1553 54.315332 0.20 -0.03 -0.18 - V 1042.0598 32.280950 0.57 -0.76 0.02 0.02 - W 1620.5108 40.255569 0.28 -0.03 -0.27 0.44 -0.21 - X 4703.6870 68.583431 0.08 -0.24 0.21 -0.13 -0.26 0.02 - Z 4821.2335 69.435103 0.04 -0.47 0.32 -0.68 0.65 -0.69 -0.10 - Residual 399627.0136 632.160592 + G (Intercept) 90797.8433 301.326805 + S 5186.2898 72.015900 -0.43 + T 5545.5545 74.468480 -0.47 0.08 + U 7590.2433 87.122003 0.21 -0.20 0.41 + V 8839.5027 94.018630 0.20 -0.76 -0.54 -0.20 + W 1822.8053 42.694324 0.47 -0.53 -0.11 -0.44 0.29 + X 7417.7267 86.126226 -0.10 0.13 -0.05 -0.86 -0.06 0.70 + Z 3800.9670 61.651983 -0.48 0.41 -0.39 -0.09 0.18 -0.78 -0.39 + H (Intercept) 129801.4428 360.279673 + S 1855.0633 43.070446 -0.34 + T 62410.5894 249.821115 -0.68 -0.45 + U 2957.5947 54.383773 0.20 -0.03 -0.18 + V 1038.0660 32.219031 0.57 -0.75 0.02 0.01 + W 1608.1394 40.101613 0.28 -0.05 -0.27 0.44 -0.20 + X 4698.6232 68.546504 0.08 -0.25 0.21 -0.13 -0.26 0.01 + Z 4836.0681 69.541844 0.04 -0.46 0.32 -0.68 0.65 -0.69 -0.10 + Residual 399601.5053 632.140416 Number of obs: 1790; levels of grouping factors: 56, 32 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 2180.63 76.7856 28.3989 <1e-99 -S -66.99 19.3346 -3.46478 0.0005 -T -333.881 47.6587 -7.00566 <1e-11 -U 78.987 21.235 3.71967 0.0002 -V 22.1518 20.3368 1.08925 0.2760 -W -18.9243 17.5061 -1.08101 0.2797 -X 5.26182 22.4216 0.234677 0.8145 -Z -23.951 21.0197 -1.13946 0.2545 +(Intercept) 2180.63 76.8177 28.387 <1e-99 +S -66.9899 19.3354 -3.46463 0.0005 +T -333.881 47.6721 -7.0037 <1e-11 +U 78.9869 21.2424 3.71837 0.0002 +V 22.1517 20.3362 1.08927 0.2760 +W -18.9244 17.4951 -1.0817 0.2794 +X 5.26191 22.418 0.234719 0.8144 +Z -23.9509 21.0303 -1.13888 0.2548 ```` @@ -53,21 +53,21 @@ Z -23.951 21.0197 -1.13946 0.2545 ````julia julia> mm1.optsum -Initial parameter vector: [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0] -Initial objective value: 30014.36976860626 +Initial parameter vector: [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0 … 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0] +Initial objective value: 30014.369768606295 Optimizer (from NLopt): LN_BOBYQA -Lower bounds: [0.0, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, 0.0, -Inf, 0.0, 0.0, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, 0.0, -Inf, 0.0] +Lower bounds: [0.0, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, -Inf … 0.0, -Inf, -Inf, -Inf, 0.0, -Inf, -Inf, 0.0, -Inf, 0.0] ftol_rel: 1.0e-12 ftol_abs: 1.0e-8 xtol_rel: 0.0 
-xtol_abs: [1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10] -initial_step: [0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 0.75, 1.0, 0.75, 0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 0.75, 1.0, 0.75] +xtol_abs: [1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10 … 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10, 1.0e-10] +initial_step: [0.75, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.75, 1.0 … 0.75, 1.0, 1.0, 1.0, 0.75, 1.0, 1.0, 0.75, 1.0, 0.75] maxfeval: -1 -Function evaluations: 2829 -Final parameter vector: [0.476444, -0.0495227, -0.0557742, 0.0295214, 0.0292097, 0.0319541, -0.0139336, -0.0463229, 0.102521, -0.0170701, -0.0165868, -0.110967, -0.0243266, 0.0126983, 0.0218126, 0.102316, 0.0781059, -0.0943707, 0.00505476, -0.0135029, -0.0650083, 0.108318, 0.00550319, -0.0540444, -0.134248, 0.0516112, 0.0, 0.000111037, 9.89607e-5, -0.000120772, 0.0, 0.000144849, -0.000171985, 0.0, 1.49074e-5, 0.0, 0.569675, -0.0234122, -0.266913, 0.0173413, 0.0290032, 0.0178621, 0.00844082, 0.0049074, 0.0640206, -0.288127, 0.00329602, -0.0307006, 0.00437284, -0.0241439, -0.053426, 0.0425733, -0.0139551, -0.0162798, -0.0184925, 0.0991966, -0.00479098, 0.0829213, -0.00678668, 0.0223168, 0.000985977, -0.0775749, 0.0226446, -0.0535127, -0.0355039, 0.0560694, 0.00364195, -0.00381326, -0.00114592, 0.0, -5.49465e-5, 0.0] -Final objective value: 28586.31798310179 +Function evaluations: 2018 +Final parameter vector: [0.476677, -0.0494935, -0.0558944, 0.0295759, 0.0292159, 0.0319406, -0.0139343, -0.0463468, 0.102611, -0.0170349 … 0.0265041, -0.0488983, -0.0548502, 0.0604581, 0.00382623, -0.0032048, -0.00157491, 0.0, 8.21222e-5, 0.0] +Final objective value: 28586.322101083984 Return code: FTOL_REACHED @@ -78,14 +78,14 @@ Return code: FTOL_REACHED ````julia julia> mm1.trms[1].Λ 8×8 LowerTriangular{Float64,Array{Float64,2}}: - 0.476444 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - -0.0495227 0.102521 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - -0.0557742 -0.0170701 0.102316 ⋅ ⋅ ⋅ ⋅ ⋅ - 0.0295214 -0.0165868 0.0781059 0.108318 ⋅ ⋅ ⋅ ⋅ - 0.0292097 -0.110967 -0.0943707 0.00550319 0.0 ⋅ ⋅ ⋅ - 0.0319541 -0.0243266 0.00505476 -0.0540444 0.000111037 0.0 ⋅ ⋅ - -0.0139336 0.0126983 -0.0135029 -0.134248 9.89607e-5 0.000144849 0.0 ⋅ - -0.0463229 0.0218126 -0.0650083 0.0516112 -0.000120772 -0.000171985 1.49074e-5 0.0 + 0.476677 ⋅ ⋅ … ⋅ ⋅ ⋅ + -0.0494935 0.102611 ⋅ ⋅ ⋅ ⋅ + -0.0558944 -0.0170349 0.10229 ⋅ ⋅ ⋅ + 0.0295759 -0.0165326 0.0779579 ⋅ ⋅ ⋅ + 0.0292159 -0.111063 -0.0943527 ⋅ ⋅ ⋅ + 0.0319406 -0.0244269 0.00509686 … 0.0 ⋅ ⋅ + -0.0139343 0.0125996 -0.01326 -0.000291769 0.0 ⋅ + -0.0463468 0.0217752 -0.0650938 0.000361817 -6.80369e-5 0.0 ```` @@ -94,14 +94,14 @@ julia> mm1.trms[1].Λ ````julia 
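# As with `trms[1]` above, `trms[2]` is the vector-valued random-effects term for
# the second grouping factor, `H`; its `Λ` field is the corresponding 8×8
# lower-triangular relative covariance factor.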
julia> mm1.trms[2].Λ 8×8 LowerTriangular{Float64,Array{Float64,2}}: - 0.569675 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - -0.0234122 0.0640206 ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ - -0.266913 -0.288127 0.0425733 ⋅ ⋅ ⋅ ⋅ ⋅ - 0.0173413 0.00329602 -0.0139551 0.0829213 ⋅ ⋅ ⋅ ⋅ - 0.0290032 -0.0307006 -0.0162798 -0.00678668 0.0226446 ⋅ ⋅ ⋅ - 0.0178621 0.00437284 -0.0184925 0.0223168 -0.0535127 0.00364195 ⋅ ⋅ - 0.00844082 -0.0241439 0.0991966 0.000985977 -0.0355039 -0.00381326 0.0 ⋅ - 0.0049074 -0.053426 -0.00479098 -0.0775749 0.0560694 -0.00114592 -5.49465e-5 0.0 + 0.569936 ⋅ ⋅ … ⋅ ⋅ ⋅ + -0.0233133 0.0640217 ⋅ ⋅ ⋅ ⋅ + -0.26706 -0.288259 0.0420499 ⋅ ⋅ ⋅ + 0.0173555 0.00368853 -0.0116354 ⋅ ⋅ ⋅ + 0.029017 -0.0299586 -0.0111736 ⋅ ⋅ ⋅ + 0.0179099 0.0029801 -0.0289884 … 0.00382623 ⋅ ⋅ + 0.00850529 -0.0258243 0.0893922 -0.0032048 0.0 ⋅ + 0.00489805 -0.0520096 0.00572593 -0.00157491 8.21222e-5 0.0 ```` diff --git a/docs/src/assets/MultipleTerms_10_1.svg b/docs/src/assets/MultipleTerms_10_1.svg index a5a7335e2..3b4d188c7 100644 --- a/docs/src/assets/MultipleTerms_10_1.svg +++ b/docs/src/assets/MultipleTerms_10_1.svg @@ -9,44 +9,44 @@ stroke-width="0.3" font-size="3.88" > - - + + σ₁ - + 0.0 - 0.5 - 1.0 - 1.5 - 2.0 + 0.5 + 1.0 + 1.5 - - - + + + - + - + - - - + + - - - + + + + + - + 0 1 2 @@ -55,7 +55,7 @@ - + diff --git a/docs/src/assets/MultipleTerms_16_1.svg b/docs/src/assets/MultipleTerms_16_1.svg index a6ce74873..87f40d709 100644 --- a/docs/src/assets/MultipleTerms_16_1.svg +++ b/docs/src/assets/MultipleTerms_16_1.svg @@ -44,13 +44,15 @@ - - + + + + - + 0.0 0.2 0.4 diff --git a/docs/src/assets/MultipleTerms_26_1.svg b/docs/src/assets/MultipleTerms_26_1.svg index bbd13ce0c..074ea299d 100644 --- a/docs/src/assets/MultipleTerms_26_1.svg +++ b/docs/src/assets/MultipleTerms_26_1.svg @@ -9,42 +9,44 @@ stroke-width="0.3" font-size="3.88" > - - + + σ - + 0.0 0.5 1.0 1.5 - - - + + + - + - + - - - + + + + + - + 0 1 2 @@ -53,7 +55,7 @@ - + diff --git a/docs/src/assets/MultipleTerms_27_1.svg b/docs/src/assets/MultipleTerms_27_1.svg index ec7001c17..5b3d691bd 100644 --- a/docs/src/assets/MultipleTerms_27_1.svg +++ b/docs/src/assets/MultipleTerms_27_1.svg @@ -9,11 +9,11 @@ stroke-width="0.3" font-size="3.88" > - - + + σ₁ - + 0 1 2 @@ -21,17 +21,17 @@ 4 5 - - - + + + - + - + @@ -39,21 +39,23 @@ - - - + + + + + - + 0.0 0.5 1.0 - + diff --git a/docs/src/assets/MultipleTerms_8_1.svg b/docs/src/assets/MultipleTerms_8_1.svg index 58f432b8c..bb0add447 100644 --- a/docs/src/assets/MultipleTerms_8_1.svg +++ b/docs/src/assets/MultipleTerms_8_1.svg @@ -9,11 +9,11 @@ stroke-width="0.3" font-size="3.88" > - - + + β₁ - + 19 20 21 @@ -24,12 +24,12 @@ 26 27 - - - + + + - + @@ -38,7 +38,7 @@ - + @@ -49,14 +49,16 @@ - - - + + + + + - + 0.0 0.1 0.2 @@ -67,7 +69,7 @@ - + diff --git a/docs/src/assets/MultipleTerms_9_1.svg b/docs/src/assets/MultipleTerms_9_1.svg index 19e9e1144..31d42b44a 100644 --- a/docs/src/assets/MultipleTerms_9_1.svg +++ b/docs/src/assets/MultipleTerms_9_1.svg @@ -9,11 +9,11 @@ stroke-width="0.3" font-size="3.88" > - - + + σ - + 0.3 0.4 0.5 @@ -21,18 +21,18 @@ 0.7 0.8 - - - + + + - + - + @@ -40,14 +40,16 @@ - - - + + + + + - + 0 5 10 @@ -55,7 +57,7 @@ - + diff --git a/docs/src/assets/SimpleLMM_4_1.svg b/docs/src/assets/SimpleLMM_4_1.svg index 08b8e5a75..d8f319b36 100644 --- a/docs/src/assets/SimpleLMM_4_1.svg +++ b/docs/src/assets/SimpleLMM_4_1.svg @@ -9,11 +9,11 @@ stroke-width="0.3" font-size="3.88" > - - + + Yield of dyestuff (g) - + 1400 1450 1500 @@ -21,19 +21,19 @@ 1600 1650 - - - + + + - + - + @@ -41,105 +41,165 @@ - - - - - + + + + + + + - - + + + + - - + + + 
[SVG diff hunks for the regenerated plot assets docs/src/assets/SimpleLMM_*.svg and docs/src/assets/SingularCovariance_*.svg omitted; the recoverable axis labels are "Standard Normal Quantiles", "σ", "σ₁", "σ²", "σ₁²", "Simulated response", "κ⁻¹", "ρ₁₂", and "Conditional mean of intercept random effect".]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + -10 -5 0 @@ -373,321 +579,321 @@ 10 15 - + Conditional mean of slope random effect - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + - + diff --git a/docs/src/assets/bootstrap_6_1.svg b/docs/src/assets/bootstrap_6_1.svg new file mode 100644 index 000000000..28c4ea6e1 --- /dev/null +++ b/docs/src/assets/bootstrap_6_1.svg @@ -0,0 +1,64 @@ + + + + + Parametric bootstrap estimates of σ + + + 0 + 50 + 100 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0.00 + 0.01 + 0.02 + 0.03 + 0.04 + 0.05 + 0.06 + + + + + + + + diff --git a/docs/src/assets/bootstrap_7_1.svg b/docs/src/assets/bootstrap_7_1.svg new file mode 100644 index 000000000..30d88ac48 --- /dev/null +++ b/docs/src/assets/bootstrap_7_1.svg @@ -0,0 +1,60 @@ + + + + + Parametric bootstrap estimates of σ₁ + + + -50 + 0 + 50 + 100 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0.00 + 0.01 + 0.02 + 0.03 + + + + + + + + diff --git a/docs/src/assets/bootstrap_8_1.svg b/docs/src/assets/bootstrap_8_1.svg index 8be861427..e215281c2 100644 --- a/docs/src/assets/bootstrap_8_1.svg +++ b/docs/src/assets/bootstrap_8_1.svg @@ -9,58 +9,199 @@ stroke-width="0.3" font-size="3.88" > - - - Parametric bootstrap estimates of σ² + + + Parametric bootstrap estimates of σ₁ - - 0 - 2.0×10³ - 4.0×10³ - 6.0×10³ - 8.0×10³ + + 0 + 50 + 100 - - - - + + + + - - - - - - - - + + + + + - - - - - + + + - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - 0.0000 - 0.0001 - 0.0002 - 0.0003 - 0.0004 - 0.0005 - 0.0006 + + 0 + 5.0×10³ + 1.0×10⁴ + 1.5×10⁴ - - + + diff --git a/docs/src/bootstrap.md b/docs/src/bootstrap.md index dd3a09b81..cca05f3e0 100644 --- a/docs/src/bootstrap.md +++ b/docs/src/bootstrap.md @@ -22,7 +22,7 @@ parameter, `θ`, that defines the variance-covariance matrices of the random eff For example, a simple linear mixed-effects model for the `Dyestuff` data in the [`lme4`](http://github.com/lme4/lme4) package for [`R`](https://www.r-project.org) is fit by ````julia -julia> using DataFrames, Gadfly, MixedModels, RData +julia> using DataFrames, MixedModels, RData, Gadfly ```` @@ -34,26 +34,26 @@ julia> ds = names!(dat[:Dyestuff], [:Batch, :Yield]) 30×2 DataFrames.DataFrame │ Row │ Batch │ Yield │ ├─────┼───────┼────────┤ -│ 1 │ "A" │ 1545.0 │ -│ 2 │ "A" │ 1440.0 │ -│ 3 │ "A" │ 1440.0 │ -│ 4 │ "A" │ 1520.0 │ -│ 5 │ "A" │ 1580.0 │ -│ 6 │ "B" │ 1540.0 │ -│ 7 │ "B" │ 1555.0 │ -│ 8 │ "B" │ 1490.0 │ +│ 
1 │ A │ 1545.0 │ +│ 2 │ A │ 1440.0 │ +│ 3 │ A │ 1440.0 │ +│ 4 │ A │ 1520.0 │ +│ 5 │ A │ 1580.0 │ +│ 6 │ B │ 1540.0 │ +│ 7 │ B │ 1555.0 │ +│ 8 │ B │ 1490.0 │ ⋮ -│ 22 │ "E" │ 1630.0 │ -│ 23 │ "E" │ 1515.0 │ -│ 24 │ "E" │ 1635.0 │ -│ 25 │ "E" │ 1625.0 │ -│ 26 │ "F" │ 1520.0 │ -│ 27 │ "F" │ 1455.0 │ -│ 28 │ "F" │ 1450.0 │ -│ 29 │ "F" │ 1480.0 │ -│ 30 │ "F" │ 1445.0 │ - -julia> m1 = fit!(lmm(@formula(Yield ~ 1 + (1 | Batch)), ds)) +│ 22 │ E │ 1630.0 │ +│ 23 │ E │ 1515.0 │ +│ 24 │ E │ 1635.0 │ +│ 25 │ E │ 1625.0 │ +│ 26 │ F │ 1520.0 │ +│ 27 │ F │ 1455.0 │ +│ 28 │ F │ 1450.0 │ +│ 29 │ F │ 1480.0 │ +│ 30 │ F │ 1445.0 │ + +julia> m1 = fit(LinearMixedModel, @formula(Yield ~ 1 + (1 | Batch)), ds) Linear mixed model fit by maximum likelihood Formula: Yield ~ 1 + (1 | Batch) logLik -2 logLik AIC BIC @@ -61,7 +61,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - Batch (Intercept) 1388.3333 37.260345 + Batch (Intercept) 1388.3332 37.260344 Residual 2451.2500 49.510100 Number of obs: 30; levels of grouping factors: 6 @@ -76,70 +76,33 @@ Variance components: - -## Using the `bootstrap!` function - -This quick explanation is provided for those who only wish to use the `bootstrap!` method and do not need -detailed explanations of how it works. -The three arguments to `bootstrap!` are the matrix that will be overwritten with the results, the model to bootstrap, -and a function that overwrites a vector with the results of interest from the model. - -Suppose the objective is to obtain 100,000 parametric bootstrap samples of the estimates of the "variance -components", `σ²` and `σ₁²`, in this model. In many implementations of mixed-effects models the -estimate of `σ₁²`, the variance of the scalar random effects, is reported along with a -standard error, as if the estimator could be assumed to have a Gaussian distribution. -Is this a reasonable assumption? - -A suitable function to save the results is +Now bootstrap the model parameters ````julia -julia> function saveresults!(v, m) - v[1] = varest(m) - v[2] = abs2(getθ(m)[1]) * v[1] -end -saveresults! (generic function with 1 method) - -```` - - - - -The `varest` extractor function returns the estimate of `σ²`. As seen above, the estimate of the -`σ₁` is the product of `Θ` and the estimate of `σ`. The expression `abs2(getΘ(m)[1])` evaluates to -`Θ²`. The `[1]` is necessary because the value returned by `getθ` is a vector and a scalar is needed -here. - -As with any simulation-based method, it is advisable to set the random number seed before calling -`bootstrap!` for reproducibility. -````julia -julia> srand(1234321); - -```` - - - -````julia -julia> results = bootstrap!(zeros(2, 100000), m1, saveresults!); +julia> results = bootstrap(100_000, m1); +julia> showcompact(names(results)) +Symbol[:obj, :σ, :β₁, :θ₁, :σ₁] ```` The results for each bootstrap replication are stored in the columns of the matrix passed in as the first -argument. A density plot of the first row using the [`Gadfly`](https://github.com/dcjones/Gadfly.jl) package -is created as +argument. 
A density plot of the bootstrapped values of `σ` is created as ````julia -plot(x = view(results, 1, :), Geom.density(), Guide.xlabel("Parametric bootstrap estimates of σ²")) +plot(results, x = :σ, Geom.density, Guide.xlabel("Parametric bootstrap estimates of σ")) ```` -![Density of parametric bootstrap estimates of σ² from model m1](./assets//bootstrap_8_1.svg) +![Density of parametric bootstrap estimates of σ from model m1](./assets//bootstrap_6_1.svg) + +![Density of parametric bootstrap estimates of σ₁ from model m1](./assets//bootstrap_7_1.svg) -![Density of parametric bootstrap estimates of σ₁² from model m1](./assets//bootstrap_9_1.svg) +![Histogram of parametric bootstrap estimates of σ₁ from model m1](./assets//bootstrap_8_1.svg) -The distribution of the bootstrap samples of `σ²` is a bit skewed but not terribly so. However, the -distribution of the bootstrap samples of the estimate of `σ₁²` is highly skewed and has a spike at +The distribution of the bootstrap samples of `σ` is a bit skewed but not terribly so. However, the +distribution of the bootstrap samples of the estimate of `σ₁` is highly skewed and has a spike at zero. diff --git a/docs/src/constructors.md b/docs/src/constructors.md index 5e71ff9ff..dcd1c8336 100644 --- a/docs/src/constructors.md +++ b/docs/src/constructors.md @@ -1,9 +1,9 @@ # Model constructors -The `lmm` function creates a linear mixed-effects model representation from a `Formula` and an appropriate `data` type. -At present the data type must be a `DataFrame` but this is expected to change. +The `LinearMixedModel` type represents a linear mixed-effects model. +Typically it is constructed from a `Formula` and an appropriate `data` type, usually a `DataFrame`. ```@docs -lmm +LinearMixedModel ``` ## Examples of linear mixed-effects model fits @@ -52,7 +52,7 @@ Categorical covariates not suitable as grouping factors are named starting with The formula language in *Julia* is similar to that in *R* except that the formula must be enclosed in a call to the `@formula` macro. A basic model with simple, scalar random effects for the levels of `G` (the batch of an intermediate product, in this case) is declared and fit as ````julia -julia> fm1 = fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])) +julia> fm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) logLik -2 logLik AIC BIC @@ -60,7 +60,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - G (Intercept) 1388.3333 37.260345 + G (Intercept) 1388.3332 37.260344 Residual 2451.2500 49.510100 Number of obs: 30; levels of grouping factors: 6 @@ -79,8 +79,8 @@ Variance components: The second and subsequent calls to such functions are much faster.) ````julia -julia> @time fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff2])) - 0.000941 seconds (1.44 k allocations: 74.375 KiB) +julia> @time fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff2]) + 0.000789 seconds (1.34 k allocations: 71.859 KiB) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) logLik -2 logLik AIC BIC @@ -119,7 +119,7 @@ It corresponds to a shift in the intercept for each level of the grouping factor The *sleepstudy* data are observations of reaction time, `Y`, on several subjects, `G`, after 0 to 9 days of sleep deprivation, `U`. 
A model with random intercepts and random slopes for each subject, allowing for within-subject correlation of the slope and intercept, is fit as ````julia -julia> fm2 = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy])) +julia> fm2 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + U + ((1 + U) | G) logLik -2 logLik AIC BIC @@ -146,7 +146,7 @@ U 10.4673 1.50224 6.96781 <1e-11 A model with uncorrelated random effects for the intercept and slope by subject is fit as ````julia -julia> fm3 = fit!(lmm(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])) +julia> fm3 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + U + (1 | G) + ((0 + U) | G) logLik -2 logLik AIC BIC @@ -154,7 +154,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. Corr. - G (Intercept) 584.258971 24.17145 + G (Intercept) 584.258973 24.17145 U 33.632805 5.79938 0.00 Residual 653.115782 25.55613 Number of obs: 180; levels of grouping factors: 18 @@ -179,7 +179,7 @@ and, internally, are amalgamated into a single vector-valued term. A model for the *Penicillin* data incorporates random effects for the plate, `G`, and for the sample, `H`. As every sample is used on every plate these two factors are *crossed*. ````julia -julia> fm4 = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin])) +julia> fm4 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Penicillin]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) + (1 | H) logLik -2 logLik AIC BIC @@ -188,7 +188,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. G (Intercept) 0.7149795 0.8455646 - H (Intercept) 3.1351920 1.7706474 + H (Intercept) 3.1351924 1.7706474 Residual 0.3024264 0.5499331 Number of obs: 144; levels of grouping factors: 24, 6 @@ -206,7 +206,7 @@ Variance components: In contrast the sample, `G`, grouping factor is *nested* within the batch, `H`, grouping factor in the *Pastes* data. That is, each level of `G` occurs in conjunction with only one level of `H`. ````julia -julia> fm5 = fit!(lmm(@formula(Y ~ 1 + (1|G) + (1|H)), dat[:Pastes])) +julia> fm5 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G) + (1|H)), dat[:Pastes]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) + (1 | H) logLik -2 logLik AIC BIC @@ -214,8 +214,8 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - G (Intercept) 8.4336166 2.90406897 - H (Intercept) 1.1991794 1.09507048 + G (Intercept) 8.4336167 2.90406898 + H (Intercept) 1.1991793 1.09507045 Residual 0.6780021 0.82340884 Number of obs: 60; levels of grouping factors: 30, 10 @@ -234,17 +234,17 @@ In observational studies it is common to encounter *partially crossed* grouping For example, the *InstEval* data are course evaluations by students, `G`, of instructors, `H`. Additional covariates include the academic department, `I`, in which the course was given and `A`, whether or not it was a service course. 
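Whether two grouping factors are nested, fully crossed, or partially crossed can be checked directly from the data. The sketch below (not part of the package API, and relying on the `dat` dictionary used above) counts, for each level of `G`, how many distinct levels of `H` occur with it: fully crossed data give the total number of `H` levels every time, nested data give one, and partially crossed data give a mixture.

````julia
# Sketch only: distinct levels of H observed with each level of G
crosscounts(df) = [length(unique(df[df[:G] .== g, :H])) for g in unique(df[:G])]

extrema(crosscounts(dat[:Penicillin])), extrema(crosscounts(dat[:Pastes]))
````

For the *Penicillin* data every count equals the number of samples and for the *Pastes* data every count is one, while for the *InstEval* data the counts vary from student to student, which is what *partially crossed* means here.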
````julia -julia> fm6 = fit!(lmm(@formula(Y ~ 1 + A * I + (1|G) + (1|H)), dat[:InstEval])) +julia> fm6 = fit(LinearMixedModel, @formula(Y ~ 1 + A * I + (1|G) + (1|H)), dat[:InstEval]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + A * I + (1 | G) + (1 | H) logLik -2 logLik AIC BIC -1.18792777×10⁵ 2.37585553×10⁵ 2.37647553×10⁵ 2.37932876×10⁵ Variance components: - Column Variance Std.Dev. - G (Intercept) 0.105417976 0.32468135 - H (Intercept) 0.258416368 0.50834670 - Residual 1.384727771 1.17674457 + Column Variance Std.Dev. + G (Intercept) 0.10541790 0.32468122 + H (Intercept) 0.25841635 0.50834669 + Residual 1.38472780 1.17674458 Number of obs: 73421; levels of grouping factors: 2972, 1128 Fixed-effects parameters: @@ -253,21 +253,21 @@ Variance components: A: 1 0.252025 0.0686507 3.67112 0.0002 I: 5 0.129536 0.101294 1.27882 0.2010 I: 10 -0.176751 0.0881352 -2.00545 0.0449 -I: 12 0.0517102 0.0817524 0.632522 0.5270 +I: 12 0.0517102 0.0817523 0.632523 0.5270 I: 6 0.0347319 0.085621 0.405647 0.6850 I: 7 0.14594 0.0997984 1.46235 0.1436 I: 4 0.151689 0.0816897 1.85689 0.0633 I: 8 0.104206 0.118751 0.877517 0.3802 -I: 9 0.0440401 0.0962985 0.457329 0.6474 +I: 9 0.0440401 0.0962985 0.45733 0.6474 I: 14 0.0517546 0.0986029 0.524879 0.5997 I: 1 0.0466719 0.101942 0.457828 0.6471 I: 3 0.0563461 0.0977925 0.57618 0.5645 -I: 11 0.0596536 0.100233 0.59515 0.5517 -I: 2 0.00556281 0.110867 0.0501756 0.9600 +I: 11 0.0596536 0.100233 0.595151 0.5517 +I: 2 0.00556285 0.110867 0.0501761 0.9600 A: 1 & I: 5 -0.180757 0.123179 -1.46744 0.1423 -A: 1 & I: 10 0.0186492 0.110017 0.169513 0.8654 -A: 1 & I: 12 -0.282269 0.0792937 -3.55979 0.0004 -A: 1 & I: 6 -0.494464 0.0790278 -6.25683 <1e-9 +A: 1 & I: 10 0.0186492 0.110017 0.169512 0.8654 +A: 1 & I: 12 -0.282269 0.0792937 -3.5598 0.0004 +A: 1 & I: 6 -0.494464 0.0790278 -6.25684 <1e-9 A: 1 & I: 7 -0.392054 0.110313 -3.55403 0.0004 A: 1 & I: 4 -0.278547 0.0823727 -3.38154 0.0007 A: 1 & I: 8 -0.189526 0.111449 -1.70056 0.0890 @@ -287,15 +287,15 @@ A: 1 & I: 2 -0.384773 0.091843 -4.18946 <1e-4 ## Fitting generalized linear mixed models -To create a GLMM using +To create a GLMM representation ```@docs -glmm +GeneralizedLinearMixedModel ``` the distribution family for the response, and possibly the link function, must be specified. ````julia -julia> gm1 = fit!(glmm(@formula(r2 ~ 1 + a + g + b + s + m + (1|id) + (1|item)), dat[:VerbAgg], - Bernoulli())) +julia> gm1 = fit(GeneralizedLinearMixedModel, @formula(r2 ~ 1 + a + g + b + s + m + (1|id) + (1|item)), + dat[:VerbAgg], Bernoulli()) Generalized Linear Mixed Model fit by minimizing the Laplace approximation to the deviance Formula: r2 ~ 1 + a + g + b + s + m + (1 | id) + (1 | item) Distribution: Distributions.Bernoulli{Float64} @@ -304,21 +304,21 @@ Generalized Linear Mixed Model fit by minimizing the Laplace approximation to th Deviance (Laplace approximation): 8135.8329 Variance components: - Column Variance Std.Dev. - id (Intercept) 1.793470989 1.3392054 - item (Intercept) 0.117151977 0.3422747 + Column Variance Std.Dev. 
+ id (Intercept) 1.793432300 1.33919091 + item (Intercept) 0.117130236 0.34224295 Number of obs: 7584; levels of grouping factors: 316, 24 Fixed-effects parameters: Estimate Std.Error z value P(>|z|) -(Intercept) 0.553345 0.385363 1.43591 0.1510 -a 0.0574211 0.0167527 3.42757 0.0006 -g: M 0.320792 0.191206 1.67773 0.0934 -b: scold -1.05975 0.18416 -5.75448 <1e-8 -b: shout -2.1038 0.186519 -11.2793 <1e-28 -s: self -1.05429 0.151196 -6.973 <1e-11 -m: do -0.70698 0.151009 -4.68172 <1e-5 +(Intercept) 0.552284 0.385354 1.43318 0.1518 +a 0.0574478 0.0167526 3.42919 0.0006 +g: M 0.320861 0.191204 1.6781 0.0933 +b: scold -1.05954 0.184146 -5.75381 <1e-8 +b: shout -2.10341 0.186504 -11.2781 <1e-28 +s: self -1.05388 0.151184 -6.97085 <1e-11 +m: do -0.706925 0.150997 -4.68173 <1e-5 ```` @@ -349,13 +349,13 @@ nobs(::StatisticalModel) ``` ````julia julia> loglikelihood(fm1) --163.6635299405672 +-163.6635299405682 julia> aic(fm1) -333.3270598811344 +333.3270598811364 julia> bic(fm1) -337.5306520261209 +337.5306520261229 julia> dof(fm1) # 1 fixed effect, 2 variances 3 @@ -364,7 +364,7 @@ julia> nobs(fm1) # 30 observations 30 julia> loglikelihood(gm1) --4067.9164280544696 +-4067.9164365980514 ```` @@ -385,10 +385,10 @@ This value is also accessible as the `deviance` but the user should bear in mind For example, it is not necessarily non-negative. ````julia julia> objective(fm1) -327.3270598811344 +327.3270598811364 julia> deviance(fm1) -327.3270598811344 +327.3270598811364 ```` @@ -402,7 +402,7 @@ LaplaceDeviance ``` ````julia julia> LaplaceDeviance(gm1) -8135.832856108941 +8135.83287319609 ```` @@ -423,7 +423,7 @@ julia> show(coef(fm1)) julia> show(fixef(fm1)) [1527.5] julia> show(fixef(gm1)) -[0.553345, 0.0574211, 0.320792, -1.05975, -2.1038, -1.05429, -0.70698] +[0.0574478, -1.05388, -0.706925, -1.05954, 0.320861, -2.10341, 0.552284] ```` @@ -442,13 +442,13 @@ julia> vcov(fm2) julia> vcov(gm1) 7×7 Array{Float64,2}: - 0.148505 -0.0056046 -0.00977076 -0.0169713 -0.0171437 -0.0114552 -0.0114564 - -0.0056046 0.000280653 7.1912e-5 -1.43714e-5 -2.90566e-5 -1.4797e-5 -1.02415e-5 - -0.00977076 7.1912e-5 0.0365599 -9.25611e-5 -0.00016239 -8.04416e-5 -5.25878e-5 - -0.0169713 -1.43714e-5 -9.25611e-5 0.0339151 0.0171841 0.000265792 0.000172093 - -0.0171437 -2.90566e-5 -0.00016239 0.0171841 0.0347894 0.000658953 0.000520519 - -0.0114552 -1.4797e-5 -8.04416e-5 0.000265792 0.000658953 0.0228602 0.000247779 - -0.0114564 -1.02415e-5 -5.25878e-5 0.000172093 0.000520519 0.000247779 0.0228036 + 0.148498 -0.00560449 -0.00977058 … -0.0114534 -0.0114545 + -0.00560449 0.000280648 7.19112e-5 -1.47965e-5 -1.02415e-5 + -0.00977058 7.19112e-5 0.0365591 -8.04373e-5 -5.25873e-5 + -0.0169685 -1.43715e-5 -9.25583e-5 0.000265763 0.000172081 + -0.0171409 -2.90566e-5 -0.000162386 0.000658871 0.000520472 + -0.0114534 -1.47965e-5 -8.04373e-5 … 0.0228565 0.000247746 + -0.0114545 -1.02415e-5 -5.25873e-5 0.000247746 0.0228 ```` @@ -461,10 +461,10 @@ The standard errors are the square roots of the diagonal elements of the estimat stderr ``` ````julia -julia> show(stderr(fm2)) +julia> show(StatsBase.stderr(fm2)) [6.63226, 1.50224] -julia> show(stderr(gm1)) -[0.385363, 0.0167527, 0.191206, 0.18416, 0.186519, 0.151196, 0.151009] +julia> show(StatsBase.stderr(gm1)) +[0.385354, 0.0167526, 0.191204, 0.184146, 0.186504, 0.151184, 0.150997] ```` @@ -506,9 +506,9 @@ Variance components: julia> VarCorr(gm1) Variance components: - Column Variance Std.Dev. 
- id (Intercept) 1.793470989 1.3392054 - item (Intercept) 0.117151977 0.3422747 + Column Variance Std.Dev. + id (Intercept) 1.793432300 1.33919091 + item (Intercept) 0.117130236 0.34224295 ```` @@ -524,10 +524,10 @@ sdest ``` ````julia julia> varest(fm2) -654.9414530367545 +654.941450830681 julia> sdest(fm2) -25.591823949002823 +25.591823905901684 ```` @@ -544,7 +544,7 @@ ranef ````julia julia> ranef(fm1) 1-element Array{Array{Float64,2},1}: - [-16.6282 0.369516 26.9747 -21.8014 53.5798 -42.4943] + [-16.6282 0.369516 … 53.5798 -42.4943] julia> ranef(fm1, named=true)[1] 1×6 Named Array{Float64,2} diff --git a/docs/src/optimization.md b/docs/src/optimization.md index 46c3458b3..e521b0d34 100644 --- a/docs/src/optimization.md +++ b/docs/src/optimization.md @@ -103,7 +103,7 @@ In the types of `LinearMixedModel` available through the `MixedModels` package, For the simple example ````julia -julia> fm1 = fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])) +julia> fm1 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|G)), dat[:Dyestuff]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | G) logLik -2 logLik AIC BIC @@ -111,7 +111,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. - G (Intercept) 1388.3333 37.260345 + G (Intercept) 1388.3332 37.260344 Residual 2451.2500 49.510100 Number of obs: 30; levels of grouping factors: 6 @@ -128,7 +128,7 @@ Variance components: the only random effects term in the formula is `(1|G)`, a simple, scalar random-effects term. ````julia julia> t1 = fm1.trms[1] -MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["A", "A", "A", "A", "A", "B", "B", "B", "B", "B", "C", "C", "C", "C", "C", "D", "D", "D", "D", "D", "E", "E", "E", "E", "E", "F", "F", "F", "F", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 0.7525806752871207) +MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["A", "A", "A", "A", "A", "B", "B", "B", "B", "B" … "E", "E", "E", "E", "E", "F", "F", "F", "F", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 0.7525806571450242) ```` @@ -146,40 +146,30 @@ julia> getθ(t1) 0.752581 julia> getΛ(t1) -0.7525806752871207 - -julia> full(t1) -30×6 Array{Float64,2}: - 1.0 0.0 0.0 0.0 0.0 0.0 - 1.0 0.0 0.0 0.0 0.0 0.0 - 1.0 0.0 0.0 0.0 0.0 0.0 - 1.0 0.0 0.0 0.0 0.0 0.0 - 1.0 0.0 0.0 0.0 0.0 0.0 - 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 1.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 1.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 0.0 1.0 - 0.0 0.0 0.0 0.0 0.0 1.0 - 0.0 0.0 0.0 0.0 0.0 1.0 - 0.0 0.0 0.0 0.0 0.0 1.0 - 0.0 0.0 0.0 0.0 0.0 1.0 +0.7525806571450242 + 
+julia> convert(Array{Int}, full(t1)) # matrix is floating point but all integers +30×6 Array{Int64,2}: + 1 0 0 0 0 0 + 1 0 0 0 0 0 + 1 0 0 0 0 0 + 1 0 0 0 0 0 + 1 0 0 0 0 0 + 0 1 0 0 0 0 + 0 1 0 0 0 0 + 0 1 0 0 0 0 + 0 1 0 0 0 0 + 0 1 0 0 0 0 + ⋮ ⋮ + 0 0 0 0 1 0 + 0 0 0 0 1 0 + 0 0 0 0 1 0 + 0 0 0 0 1 0 + 0 0 0 0 0 1 + 0 0 0 0 0 1 + 0 0 0 0 0 1 + 0 0 0 0 0 1 + 0 0 0 0 0 1 ```` @@ -194,7 +184,7 @@ For a `ScalarFactorReTerm` this block is a multiple of the identity, in this cas For a vector-valued random-effects term, as in ````julia -julia> fm2 = fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy])) +julia> fm2 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + U + ((1 + U) | G) logLik -2 logLik AIC BIC @@ -214,7 +204,7 @@ U 10.4673 1.50224 6.96781 <1e-11 julia> t21 = fm2.trms[1] -MixedModels.VectorFactorReTerm{Float64,String,UInt8,2}(CategoricalArrays.CategoricalString{UInt8}["308", "308", "308", "308", "308", "308", "308", "308", "308", "308", "309", "309", "309", "309", "309", "309", "309", "309", "309", "309", "310", "310", "310", "310", "310", "310", "310", "310", "310", "310", "330", "330", "330", "330", "330", "330", "330", "330", "330", "330", "331", "331", "331", "331", "331", "331", "331", "331", "331", "331", "332", "332", "332", "332", "332", "332", "332", "332", "332", "332", "333", "333", "333", "333", "333", "333", "333", "333", "333", "333", "334", "334", "334", "334", "334", "334", "334", "334", "334", "334", "335", "335", "335", "335", "335", "335", "335", "335", "335", "335", "337", "337", "337", "337", "337", "337", "337", "337", "337", "337", "349", "349", "349", "349", "349", "349", "349", "349", "349", "349", "350", "350", "350", "350", "350", "350", "350", "350", "350", "350", "351", "351", "351", "351", "351", "351", "351", "351", "351", "351", "352", "352", "352", "352", "352", "352", "352", "352", "352", "352", "369", "369", "369", "369", "369", "369", "369", "369", "369", "369", "370", "370", "370", "370", "370", "370", "370", "370", "370", "370", "371", "371", "371", "371", "371", "371", "371", "371", "371", "371", "372", "372", "372", "372", "372", "372", "372", "372", "372", "372"], [1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0; 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 
8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0], [1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0; 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0], SVector{2,Float64}[[1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 
3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0]], :G, String["(Intercept)", "U"], [2], [0.929221 0.0; 0.0181684 0.222645], [1, 2, 4]) +MixedModels.VectorFactorReTerm{Float64,String,UInt8,2}(CategoricalArrays.CategoricalString{UInt8}["308", "308", "308", "308", "308", "308", "308", "308", "308", "308" … "372", "372", "372", "372", "372", "372", "372", "372", "372", "372"], [1.0 1.0 … 1.0 1.0; 0.0 1.0 … 8.0 9.0], [1.0 1.0 … 1.0 1.0; 0.0 1.0 … 8.0 9.0], StaticArrays.SArray{Tuple{2},Float64,1,2}[[1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0] … [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0]], :G, String["(Intercept)", "U"], [2], [0.929221 0.0; 0.0181684 0.222645], [1, 2, 4]) ```` @@ -227,188 +217,28 @@ VectorFactorReTerm ``` The model matrix $\bf Z$ for this model is ````julia -julia> full(t21) -180×36 Array{Float64,2}: - 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 0.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 1.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 2.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 3.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 4.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 5.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 6.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 7.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 8.0 - 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 9.0 +julia> convert(Array{Int}, full(t21)) +180×36 Array{Int64,2}: + 1 0 0 0 0 0 0 0 0 0 0 0 0 … 0 0 0 0 0 0 0 0 0 0 0 0 + 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 5 0 0 0 0 0 0 0 0 0 0 0 … 0 0 0 0 0 0 0 0 0 0 0 0 + 1 6 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + 1 9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 + ⋮ ⋮ ⋮ ⋱ ⋮ ⋮ ⋮ + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 3 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 4 + 0 0 0 0 0 0 0 0 0 0 0 0 0 … 0 0 0 0 0 0 0 0 0 0 1 5 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 6 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 7 + 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 8 + 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 9 ```` @@ -443,7 +273,7 @@ julia> getθ(t21) Random-effects terms in the model formula that have the same grouping factor are amagamated into a single `VectorFactorReTerm` object. ````julia -julia> fm3 = fit!(lmm(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])) +julia> fm3 = fit(LinearMixedModel, @formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + U + (1 | G) + ((0 + U) | G) logLik -2 logLik AIC BIC @@ -451,7 +281,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. Corr. - G (Intercept) 584.258971 24.17145 + G (Intercept) 584.258973 24.17145 U 33.632805 5.79938 0.00 Residual 653.115782 25.55613 Number of obs: 180; levels of grouping factors: 18 @@ -463,7 +293,7 @@ U 10.4673 1.51931 6.88951 <1e-11 julia> t31 = fm3.trms[1] -MixedModels.VectorFactorReTerm{Float64,String,UInt8,2}(CategoricalArrays.CategoricalString{UInt8}["308", "308", "308", "308", "308", "308", "308", "308", "308", "308", "309", "309", "309", "309", "309", "309", "309", "309", "309", "309", "310", "310", "310", "310", "310", "310", "310", "310", "310", "310", "330", "330", "330", "330", "330", "330", "330", "330", "330", "330", "331", "331", "331", "331", "331", "331", "331", "331", "331", "331", "332", "332", "332", "332", "332", "332", "332", "332", "332", "332", "333", "333", "333", "333", "333", "333", "333", "333", "333", "333", "334", "334", "334", "334", "334", "334", "334", "334", "334", "334", "335", "335", "335", "335", "335", "335", "335", "335", "335", "335", "337", "337", "337", "337", "337", "337", "337", "337", "337", "337", "349", "349", "349", "349", "349", "349", "349", "349", "349", "349", "350", "350", "350", "350", "350", "350", "350", "350", "350", "350", "351", "351", "351", "351", "351", "351", "351", "351", "351", "351", "352", "352", "352", "352", "352", "352", "352", "352", "352", "352", "369", "369", "369", "369", "369", "369", "369", "369", "369", "369", "370", "370", "370", "370", "370", "370", "370", "370", "370", "370", "371", "371", "371", "371", "371", "371", "371", "371", "371", "371", "372", "372", "372", "372", "372", "372", "372", "372", "372", "372"], [1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0; 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 
4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0], [1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0; 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 0.0 1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0], SVector{2,Float64}[[1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], 
[1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0]], :G, String["(Intercept)", "U"], [1, 1], [0.945818 0.0; 0.0 0.226927], [1, 4]) +MixedModels.VectorFactorReTerm{Float64,String,UInt8,2}(CategoricalArrays.CategoricalString{UInt8}["308", "308", "308", "308", "308", "308", "308", "308", "308", "308" … "372", "372", "372", "372", "372", "372", "372", "372", "372", "372"], [1.0 1.0 … 1.0 1.0; 0.0 1.0 … 8.0 9.0], [1.0 1.0 … 1.0 1.0; 0.0 1.0 … 8.0 9.0], StaticArrays.SArray{Tuple{2},Float64,1,2}[[1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0] … [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [1.0, 3.0], [1.0, 4.0], [1.0, 5.0], [1.0, 6.0], [1.0, 7.0], [1.0, 8.0], [1.0, 9.0]], :G, String["(Intercept)", "U"], [1, 1], [0.945818 0.0; 0.0 0.226927], [1, 4]) ```` @@ -490,7 +320,7 @@ julia> getθ(t31) Random-effects terms with distinct grouping factors generate distinct elements of the `trms` member of the `LinearMixedModel` object. Multiple `AbstractFactorReTerm` (i.e. either a `ScalarFactorReTerm` or a `VectorFactorReTerm`) objects are sorted by decreasing numbers of random effects. ````julia -julia> fm4 = fit!(lmm(@formula(Y ~ 1 + (1|H) + (1|G)), dat[:Penicillin])) +julia> fm4 = fit(LinearMixedModel, @formula(Y ~ 1 + (1|H) + (1|G)), dat[:Penicillin]) Linear mixed model fit by maximum likelihood Formula: Y ~ 1 + (1 | H) + (1 | G) logLik -2 logLik AIC BIC @@ -499,7 +329,7 @@ Linear mixed model fit by maximum likelihood Variance components: Column Variance Std.Dev. G (Intercept) 0.7149795 0.8455646 - H (Intercept) 3.1351920 1.7706474 + H (Intercept) 3.1351924 1.7706474 Residual 0.3024264 0.5499331 Number of obs: 144; levels of grouping factors: 24, 6 @@ -509,10 +339,10 @@ Variance components: julia> t41 = fm4.trms[1] -MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["a", "a", "a", "a", "a", "a", "b", "b", "b", "b", "b", "b", "c", "c", "c", "c", "c", "c", "d", "d", "d", "d", "d", "d", "e", "e", "e", "e", "e", "e", "f", "f", "f", "f", "f", "f", "g", "g", "g", "g", "g", "g", "h", "h", "h", "h", "h", "h", "i", "i", "i", "i", "i", "i", "j", "j", "j", "j", "j", "j", "k", "k", "k", "k", "k", "k", "l", "l", "l", "l", "l", "l", "m", "m", "m", "m", "m", "m", "n", "n", "n", "n", "n", "n", "o", "o", "o", "o", "o", "o", "p", "p", "p", "p", "p", "p", "q", "q", "q", "q", "q", "q", "r", "r", "r", "r", "r", "r", "s", "s", "s", "s", "s", "s", "t", "t", "t", "t", "t", "t", "u", "u", "u", "u", "u", "u", "v", "v", "v", "v", "v", "v", "w", "w", "w", "w", "w", "w", "x", "x", "x", "x", "x", "x"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 
1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 1.5375772607142872) +MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["a", "a", "a", "a", "a", "a", "b", "b", "b", "b" … "w", "w", "w", "w", "x", "x", "x", "x", "x", "x"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :G, String["(Intercept)"], 1.5375772637253458) julia> t42 = fm4.trms[2] -MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F", "A", "B", "C", "D", "E", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :H, String["(Intercept)"], 3.219750514910774) 
+MixedModels.ScalarFactorReTerm{Float64,String,UInt8}(CategoricalArrays.CategoricalString{UInt8}["A", "B", "C", "D", "E", "F", "A", "B", "C", "D" … "C", "D", "E", "F", "A", "B", "C", "D", "E", "F"], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 … 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0], :H, String["(Intercept)"], 3.219750701314265) ```` @@ -525,7 +355,7 @@ Note that the first `ScalarFactorReTerm` in `fm4.trms` corresponds to grouping f An optional `Bool` argument of `true` in the call to `fit!` of a `LinearMixedModel` causes printing of the objective and the $\theta$ parameter at each evaluation during the optimization. ````julia -julia> fit!(lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff]), true); +julia> fit!(LinearMixedModel(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff]), true); f_1: 327.76702 [1.0] f_2: 331.03619 [1.75] f_3: 330.64583 [0.25] @@ -545,7 +375,7 @@ f_16: 327.32706 [0.752509] f_17: 327.32706 [0.752591] f_18: 327.32706 [0.752581] -julia> fit!(lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true); +julia> fit!(LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]), true); f_1: 1784.6423 [1.0, 0.0, 1.0] f_2: 1790.12564 [1.75, 0.0, 1.0] f_3: 1798.99962 [1.0, 1.0, 1.0] @@ -618,7 +448,7 @@ object, which is the `optsum` member of the `LinearMixedModel`. ````julia julia> fm2.optsum Initial parameter vector: [1.0, 0.0, 1.0] -Initial objective value: 1784.6422961924686 +Initial objective value: 1784.6422961924507 Optimizer (from NLopt): LN_BOBYQA Lower bounds: [0.0, -Inf, 0.0] @@ -631,7 +461,7 @@ maxfeval: -1 Function evaluations: 57 Final parameter vector: [0.929221, 0.0181684, 0.222645] -Final objective value: 1751.9393444646773 +Final objective value: 1751.9393444646757 Return code: FTOL_REACHED @@ -648,7 +478,7 @@ To modify the optimization process the input fields can be changed after constru Suppose, for example, that the user wishes to try a [Nelder-Mead](https://en.wikipedia.org/wiki/Nelder%E2%80%93Mead_method) optimization method instead of the default [`BOBYQA`](https://en.wikipedia.org/wiki/BOBYQA) (Bounded Optimization BY Quadratic Approximation) method. ````julia -julia> fm2 = lmm(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]); +julia> fm2 = LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G)), dat[:sleepstudy]); julia> fm2.optsum.optimizer = :LN_NELDERMEAD; @@ -673,7 +503,7 @@ U 10.4673 1.50222 6.9679 <1e-11 julia> fm2.optsum Initial parameter vector: [1.0, 0.0, 1.0] -Initial objective value: 1784.6422961924686 +Initial objective value: 1784.6422961924507 Optimizer (from NLopt): LN_NELDERMEAD Lower bounds: [0.0, -Inf, 0.0] @@ -686,7 +516,7 @@ maxfeval: -1 Function evaluations: 140 Final parameter vector: [0.929236, 0.0181688, 0.222641] -Final objective value: 1751.9393444750278 +Final objective value: 1751.9393444749974 Return code: FTOL_REACHED @@ -738,7 +568,7 @@ Poisson The `glmm` function generates, but does not fit, a `GeneralizedLinearMixedModel` object. 
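Because construction and fitting are separate steps, optimizer settings or the `fast` option can be chosen before the model is fit. The following is a minimal sketch of that workflow only, assuming the `dat` dictionary of test data sets loaded earlier in these docs; the REPL transcript that follows examines such an unfitted model object in more detail before any call to `fit!`.
```julia
# Sketch only: construct the GLMM first, then fit it explicitly.
# Assumes `dat` is the Dict of test data sets loaded earlier in these docs.
mdl = GeneralizedLinearMixedModel(
    @formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item)),
    dat[:VerbAgg], Bernoulli())
fit!(mdl, fast = true)   # `fast = true` optimizes θ only, with β determined by PIRLS
```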
````julia -julia> mdl = glmm(@formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item)), +julia> mdl = GeneralizedLinearMixedModel(@formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item)), dat[:VerbAgg], Bernoulli()); julia> typeof(mdl) @@ -757,12 +587,12 @@ The starting estimate for $\beta$ is determined by fitting a GLM to the fixed-ef ````julia julia> mdl.β 6-element Array{Float64,1}: - 0.206053 0.0399404 - 0.231317 + -0.776656 -0.794186 + 0.231317 -1.53919 - -0.776656 + 0.206053 ```` @@ -798,19 +628,19 @@ The second and third arguments are optional logical values indicating if $\beta$ ````julia julia> pirls!(mdl, true, true) varyβ = true -obj₀ = 10210.8534389054 -β = [0.206053, 0.0399404, 0.231317, -0.794186, -1.53919, -0.776656] +obj₀ = 10210.853438905406 +β = [0.0399404, -0.776656, -0.794186, 0.231317, -1.53919, 0.206053] iter = 1 obj = 8301.483049027265 iter = 2 obj = 8205.604285133919 iter = 3 -obj = 8201.896597466888 +obj = 8201.89659746689 iter = 4 -obj = 8201.848598910705 +obj = 8201.848598910707 iter = 5 -obj = 8201.848559060703 -8201.848559060703 +obj = 8201.848559060705 +8201.848559060705 ```` @@ -818,7 +648,7 @@ obj = 8201.848559060703 ````julia julia> LaplaceDeviance(mdl) -8201.848559060703 +8201.848559060705 ```` @@ -827,12 +657,12 @@ julia> LaplaceDeviance(mdl) ````julia julia> mdl.β 6-element Array{Float64,1}: - 0.218535 0.0514385 - 0.290225 + -0.979493 -0.979124 + 0.290225 -1.95402 - -0.979493 + 0.218535 ```` @@ -857,8 +687,8 @@ Given a value of $\theta$, PIRLS is used to determine the conditional estimate o ````julia julia> mdl.b # conditional modes of b 2-element Array{Array{Float64,2},1}: - [-0.600772 -1.93227 -0.0601337 0.455415 -0.0601337 -0.0592371 -0.30784 -1.35294 -0.358272 1.80735 0.885198 1.12211 -1.11049 1.12236 0.495591 0.239355 0.546633 -0.191338 -2.39926 -0.0592457 -0.622114 2.07196 1.89528 -0.0641204 -1.26575 -1.19175 -0.593128 1.14862 0.0422574 1.16389 1.27252 1.50087 -0.811902 -0.318641 -0.0592457 2.23746 -1.4786 -0.467503 -1.16637 -2.07574 -0.0165811 -0.400881 -1.76538 -0.551235 -0.844867 1.27252 1.35156 -1.59171 -0.57348 -0.485627 -1.26823 0.504362 1.71923 -0.272538 -0.748993 2.39518 0.20067 1.47384 -0.933614 -1.02185 0.983772 1.6407 -0.144554 0.853269 -0.102709 -1.13932 -0.187214 2.63091 0.0260731 0.110112 -1.19175 -0.0581051 -1.76538 -0.145291 -1.65537 -0.951261 -1.02185 -1.85832 -0.860825 -0.385634 -1.29466 1.1899 -0.400881 -0.676971 0.419856 -1.70552 1.2312 0.800493 -0.4733 -1.37962 -1.61581 -1.90904 0.127412 -1.54081 0.955177 0.582894 -2.01224 0.370468 -1.11141 -1.23199 0.0270239 -1.1449 0.455415 -0.326504 1.55552 -0.148923 -1.0317 1.31086 -1.49747 -1.82001 -1.23199 -1.23199 0.495591 0.154033 1.71865 0.419856 0.673514 0.0687165 -1.17128 0.519487 0.196693 0.285552 -0.990087 2.40442 -0.403554 0.452674 -0.551235 -0.0592371 -0.899086 0.727151 0.97952 1.88793 -1.28187 -1.69498 0.885198 -1.3094 -1.19175 0.0413243 0.0974061 -1.34045 -0.676971 0.788556 0.758156 0.538004 2.52793 -0.357865 -0.400881 0.752965 -0.0601337 -1.02185 0.916215 0.97952 0.470617 0.970099 0.916215 -1.35294 -0.495346 0.631209 -0.368694 1.55552 1.01181 -1.07266 0.0422574 0.196693 -0.233763 0.455415 -0.257214 -0.187877 0.0839808 -0.106517 -0.106517 0.61619 -0.488501 -0.728311 0.370468 0.727151 2.26221 -0.446023 0.4979 0.727151 -1.37888 -0.276198 0.666102 0.452972 -1.35294 -0.760884 0.495591 0.127412 0.127412 -0.661964 -1.98915 -0.144554 0.285552 -0.318641 3.01725 -0.875977 -1.39182 -0.606988 -0.606988 -0.0175635 1.52097 0.215855 1.03969 -0.53759 -0.850511 -0.0439729 0.58084 -1.59171 1.53806 
0.910396 0.574223 1.91443 0.370468 -0.776166 0.658174 -0.657997 0.700173 0.532272 1.53806 1.04137 1.87518 1.79681 0.0260731 -0.318641 0.468222 -0.272538 0.71583 0.640614 0.452674 0.71583 0.800493 -1.17128 1.12211 0.512612 1.31387 -0.782349 -0.53759 -1.23199 -1.07131 -2.26277 -0.823849 1.63732 0.758156 0.559209 -1.85832 -0.102709 0.013146 -1.51623 -1.41831 0.540393 -2.12861 1.59592 -0.495346 -0.214573 0.111374 -0.63504 0.758156 -0.564723 1.61572 1.89528 0.71583 2.22592 2.12281 1.60657 0.243107 0.370468 0.0675594 -0.844867 -0.233763 -0.453115 0.71583 1.44666 0.61619 0.243107 0.811215 -0.326504 -0.453115 -0.230469 -1.41884 -0.699423 1.14862 -0.558273 -0.733854 1.12211 0.937422 0.428137 -0.802867 0.540393 0.285552 -0.4733 0.0734134 -0.229832 1.36695 1.51467 -0.833638 -1.75866 -1.27226 -0.871847 0.282017 0.196669 -0.906921 -1.26823 0.773305 -0.257822 0.673514 1.72793 -0.593128 0.623469 -0.284328 0.0260606 0.0885882 0.561762 -0.187877 -2.53791 -0.144554 -0.575224] - [-0.186364 0.180552 0.699755 0.290529 0.310311 0.637592 0.14885 0.00760348 0.203228 0.649678 0.313762 0.641038 -0.186364 0.0214228 -0.164584 -0.502754 -0.384328 -0.715767 -0.524248 -0.731414 -1.07929 0.310673 0.282092 -0.221974] + [-0.600772 -1.93227 … -0.144554 -0.575224] + [-0.186364 0.180552 … 0.282092 -0.221974] ```` @@ -915,7 +745,7 @@ The optimization process is summarized by ````julia julia> mdl.LMM.optsum Initial parameter vector: [1.0, 1.0] -Initial objective value: 8201.848559060625 +Initial objective value: 8201.848559060627 Optimizer (from NLopt): LN_BOBYQA Lower bounds: [0.0, 0.0] @@ -928,7 +758,7 @@ maxfeval: -1 Function evaluations: 37 Final parameter vector: [1.33956, 0.496833] -Final objective value: 8151.583340132135 +Final objective value: 8151.583340132134 Return code: FTOL_REACHED @@ -940,9 +770,9 @@ Return code: FTOL_REACHED As one would hope, given the name of the option, this fit is comparatively fast. ````julia -julia> @time(fit!(glmm(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)), +julia> @time(fit!(GeneralizedLinearMixedModel(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)), dat[:VerbAgg], Bernoulli()), fast=true)) - 0.290785 seconds (87.57 k allocations: 9.897 MiB) + 0.410434 seconds (87.56 k allocations: 10.268 MiB, 36.42% gc time) Generalized Linear Mixed Model fit by minimizing the Laplace approximation to the deviance Formula: r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item) Distribution: Distributions.Bernoulli{Float64} @@ -977,7 +807,7 @@ The alternative algorithm is to use PIRLS to find the conditional mode of the ra Because it is slower to incorporate the $\beta$ parameters in the general nonlinear optimization, the fast fit is performed first and used to determine starting estimates for the more general optimization. 
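A hedged sketch of this comparison (again assuming the `dat` dictionary is loaded; the names `mfast` and `mfull` are illustrative): fitting the model both ways and differencing the optimized objectives shows that the fast, $\theta$-only fit attains a slightly larger Laplace deviance than the joint optimization of $\beta$ and $\theta$.

````julia
form = @formula(r2 ~ 1 + a + g + b + s + (1|id) + (1|item));
mfast = fit!(GeneralizedLinearMixedModel(form, dat[:VerbAgg], Bernoulli()), fast = true);
mfull = fit!(GeneralizedLinearMixedModel(form, dat[:VerbAgg], Bernoulli()));
LaplaceDeviance(mfast) - LaplaceDeviance(mfull)   # expected to be small and nonnegative
````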
````julia -julia> @time mdl1 = fit!(glmm(@formula(r2 ~ 1+a+g+b+s+(1|id)+(1|item)), +julia> @time mdl1 = fit!(GeneralizedLinearMixedModel(@formula(r2 ~ 1+a+g+b+s+(1|id)+(1|item)), dat[:VerbAgg], Bernoulli()), verbose = true) f_1: 8201.84856 [1.0, 1.0] f_2: 8190.11782 [1.75, 1.0] @@ -1016,226 +846,853 @@ f_34: 8151.58334 [1.33953, 0.496902] f_35: 8151.58334 [1.33963, 0.496867] f_36: 8151.58334 [1.33963, 0.496803] f_37: 8151.58334 [1.33956, 0.496833] -f_1: 8151.58334 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_2: 8152.77924 [0.343415, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_3: 8152.40989 [0.208273, 0.0599636, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_4: 8151.66517 [0.208273, 0.0543791, 0.36783, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_5: 8151.80713 [0.208273, 0.0543791, 0.304089, -0.930653, -2.0218, -1.01344, 1.33956, 0.496833] -f_6: 8152.01341 [0.208273, 0.0543791, 0.304089, -1.0165, -1.93539, -1.01344, 1.33956, 0.496833] -f_7: 8151.92522 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -0.943146, 1.33956, 0.496833] -f_8: 8152.13673 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.38956, 0.496833] -f_9: 8151.94237 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.546833] -f_10: 8152.55362 [0.0731315, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_11: 8152.45791 [0.208273, 0.0487947, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_12: 8151.74834 [0.208273, 0.0543791, 0.240348, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833] -f_13: 8151.75339 [0.208273, 0.0543791, 0.304089, -1.10234, -2.0218, -1.01344, 1.33956, 0.496833] -f_14: 8151.54498 [0.208273, 0.0543791, 0.304089, -1.0165, -2.10821, -1.01344, 1.33956, 0.496833] -f_15: 8151.60826 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.08374, 1.33956, 0.496833] -f_16: 8152.10285 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.28956, 0.496833] -f_17: 8152.03137 [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.446833] -f_18: 8151.51379 [0.201253, 0.0544707, 0.313207, -1.02246, -2.0729, -1.04381, 1.33892, 0.499364] -f_19: 8151.50849 [0.201834, 0.0544477, 0.307696, -1.01938, -2.0686, -1.03218, 1.33889, 0.499004] -f_20: 8151.50725 [0.202301, 0.0544428, 0.307435, -1.01918, -2.06521, -1.02573, 1.33894, 0.498847] -f_21: 8151.50162 [0.202016, 0.0544549, 0.313046, -1.02218, -2.06781, -1.02509, 1.33895, 0.499106] -f_22: 8151.49719 [0.201869, 0.0544511, 0.323801, -1.02524, -2.07338, -1.02274, 1.33896, 0.499171] -f_23: 8151.49776 [0.201428, 0.054456, 0.329556, -1.02585, -2.07693, -1.02338, 1.33892, 0.499332] -f_24: 8151.49833 [0.202362, 0.0544455, 0.322284, -1.01796, -2.06941, -1.02202, 1.33901, 0.498991] -f_25: 8151.47933 [0.21279, 0.0544572, 0.325473, -1.02599, -2.07775, -1.02353, 1.33891, 0.499369] -f_26: 8151.4726 [0.222467, 0.0544635, 0.329038, -1.02663, -2.07984, -1.02578, 1.33882, 0.49981] -f_27: 8151.47283 [0.225105, 0.0544704, 0.336044, -1.0282, -2.0841, -1.02757, 1.33882, 0.499864] -f_28: 8151.47847 [0.228848, 0.0544831, 0.327657, -1.02586, -2.0846, -1.02593, 1.33791, 0.502905] -f_29: 8151.46851 [0.224444, 0.0544557, 0.33183, -1.02696, -2.08144, -1.02799, 1.33933, 0.497571] -f_30: 8151.47013 [0.225486, 0.0544539, 0.332178, -1.02102, -2.08508, -1.03057, 1.33908, 0.495344] -f_31: 8151.47069 [0.223228, 0.0544499, 0.329744, -1.02617, -2.07695, -1.0269, 1.33935, 0.493755] -f_32: 8151.45968 [0.225148, 0.054447, 0.330422, -1.0322, -2.0856, -1.03097, 
1.34008, 0.495691] -f_33: 8151.45447 [0.227992, 0.0544292, 0.328456, -1.04239, -2.09654, -1.03672, 1.34127, 0.493969] -f_34: 8151.45918 [0.229311, 0.0544325, 0.330086, -1.04413, -2.10154, -1.03828, 1.34138, 0.490434] -f_35: 8151.45638 [0.2284, 0.0544054, 0.32812, -1.04043, -2.09418, -1.03818, 1.34328, 0.495434] -f_36: 8151.47958 [0.229351, 0.0540475, 0.330137, -1.04418, -2.1017, -1.03833, 1.34139, 0.493772] -f_37: 8151.43503 [0.228357, 0.0549791, 0.327915, -1.04312, -2.09728, -1.03693, 1.34131, 0.494389] -f_38: 8151.42873 [0.231501, 0.0554914, 0.325634, -1.04948, -2.11058, -1.03573, 1.34117, 0.49519] -f_39: 8151.4412 [0.234693, 0.0558142, 0.328494, -1.0496, -2.11166, -1.03685, 1.3439, 0.493868] -f_40: 8151.42228 [0.229747, 0.0554225, 0.322082, -1.05074, -2.10709, -1.03865, 1.33851, 0.495817] -f_41: 8151.41519 [0.230876, 0.0554716, 0.320353, -1.05515, -2.09995, -1.05066, 1.33745, 0.495698] -f_42: 8151.41837 [0.231917, 0.0554831, 0.322185, -1.05672, -2.09334, -1.0619, 1.33756, 0.495119] -f_43: 8151.42394 [0.233136, 0.0555808, 0.32198, -1.05902, -2.10776, -1.05438, 1.33224, 0.495585] -f_44: 8151.41764 [0.236975, 0.0554898, 0.312582, -1.06553, -2.10014, -1.05299, 1.33766, 0.495093] -f_45: 8151.41525 [0.229295, 0.0557859, 0.319215, -1.05245, -2.09448, -1.04805, 1.3376, 0.495778] -f_46: 8151.41722 [0.231816, 0.055567, 0.318097, -1.05656, -2.09244, -1.051, 1.33635, 0.495196] -f_47: 8151.41829 [0.229246, 0.0553928, 0.323776, -1.05237, -2.09431, -1.04797, 1.3376, 0.49578] -f_48: 8151.41089 [0.23074, 0.0557502, 0.316397, -1.05472, -2.10042, -1.05426, 1.33876, 0.49653] -f_49: 8151.41198 [0.232381, 0.0558152, 0.311411, -1.05926, -2.10384, -1.05568, 1.33882, 0.497362] -f_50: 8151.41477 [0.228963, 0.0556417, 0.315424, -1.05848, -2.09421, -1.05104, 1.33883, 0.496554] -f_51: 8151.41292 [0.232189, 0.0557031, 0.317655, -1.04737, -2.10054, -1.05632, 1.33933, 0.494855] -f_52: 8151.41441 [0.22879, 0.0556312, 0.315329, -1.0514, -2.0936, -1.05682, 1.33883, 0.496556] -f_53: 8151.40914 [0.234485, 0.0559177, 0.316945, -1.05997, -2.10519, -1.05689, 1.33921, 0.496603] -f_54: 8151.40927 [0.235094, 0.0559259, 0.316847, -1.06104, -2.10611, -1.05686, 1.33936, 0.496869] -f_55: 8151.40911 [0.235022, 0.0559492, 0.317208, -1.06086, -2.10513, -1.05778, 1.33921, 0.496598] -f_56: 8151.40846 [0.233993, 0.0559595, 0.31682, -1.06114, -2.10514, -1.05791, 1.33928, 0.495859] -f_57: 8151.40805 [0.23308, 0.0559846, 0.316397, -1.06299, -2.10672, -1.05809, 1.33938, 0.494842] -f_58: 8151.40826 [0.233712, 0.0560214, 0.315647, -1.06535, -2.10885, -1.05843, 1.33934, 0.494775] -f_59: 8151.40819 [0.233483, 0.0561039, 0.316287, -1.06309, -2.10633, -1.05641, 1.33857, 0.495043] -f_60: 8151.40797 [0.232771, 0.0560063, 0.31632, -1.0629, -2.10645, -1.05641, 1.3392, 0.494474] -f_61: 8151.40763 [0.232275, 0.0560265, 0.316495, -1.06273, -2.1066, -1.05709, 1.33901, 0.494798] -f_62: 8151.40754 [0.229639, 0.0560254, 0.317213, -1.06343, -2.10694, -1.05686, 1.33875, 0.495405] -f_63: 8151.40735 [0.230323, 0.0560417, 0.316948, -1.06182, -2.10841, -1.05698, 1.33846, 0.495079] -f_64: 8151.40732 [0.23012, 0.0560411, 0.316791, -1.06115, -2.10633, -1.05758, 1.33844, 0.495028] -f_65: 8151.40726 [0.230333, 0.0560435, 0.316886, -1.06161, -2.10713, -1.05735, 1.33854, 0.494993] -f_66: 8151.40654 [0.229963, 0.0561, 0.317912, -1.06128, -2.10728, -1.05793, 1.33895, 0.494786] -f_67: 8151.4058 [0.229034, 0.0562062, 0.320031, -1.06027, -2.10748, -1.05919, 1.33958, 0.494575] -f_68: 8151.40722 [0.228071, 0.0562951, 0.324567, -1.0565, -2.10567, -1.06123, 1.33996, 0.49462] 
-f_69: 8151.40547 [0.229008, 0.0563671, 0.319119, -1.06173, -2.10802, -1.06108, 1.33942, 0.49495] -f_70: 8151.40546 [0.228315, 0.0564303, 0.319475, -1.06332, -2.1105, -1.06202, 1.34033, 0.494093] -f_71: 8151.40553 [0.227978, 0.0564628, 0.318999, -1.06404, -2.11044, -1.06335, 1.34014, 0.494105] -f_72: 8151.40555 [0.228982, 0.0564232, 0.318806, -1.06208, -2.10996, -1.06219, 1.34019, 0.494691] -f_73: 8151.40568 [0.229176, 0.0564338, 0.319563, -1.06334, -2.111, -1.06204, 1.3401, 0.494158] -f_74: 8151.4053 [0.227716, 0.0564417, 0.319327, -1.06335, -2.10982, -1.06192, 1.34031, 0.493973] -f_75: 8151.40528 [0.2272, 0.05643, 0.319197, -1.06317, -2.10818, -1.06203, 1.34021, 0.493859] -f_76: 8151.40483 [0.226687, 0.056436, 0.318983, -1.06317, -2.10933, -1.06129, 1.34005, 0.494141] -f_77: 8151.40463 [0.22538, 0.0565257, 0.318266, -1.0638, -2.11162, -1.0608, 1.33976, 0.493626] -f_78: 8151.40478 [0.225658, 0.0565514, 0.317663, -1.06627, -2.11296, -1.06078, 1.3403, 0.494195] -f_79: 8151.40472 [0.226072, 0.0566061, 0.318564, -1.06404, -2.11085, -1.06072, 1.33963, 0.493761] -f_80: 8151.40422 [0.225057, 0.0565605, 0.319044, -1.06377, -2.11106, -1.06114, 1.33987, 0.49411] -f_81: 8151.40362 [0.222827, 0.0566586, 0.320692, -1.06331, -2.11136, -1.06179, 1.33985, 0.494459] -f_82: 8151.40403 [0.219734, 0.0569046, 0.32365, -1.06214, -2.11065, -1.06379, 1.34005, 0.494269] -f_83: 8151.40448 [0.224243, 0.0567073, 0.320273, -1.06211, -2.11313, -1.06252, 1.34036, 0.494582] -f_84: 8151.40332 [0.221408, 0.0566446, 0.322561, -1.0625, -2.11134, -1.0605, 1.3397, 0.494653] -f_85: 8151.40353 [0.221186, 0.0565914, 0.323377, -1.06032, -2.11111, -1.05956, 1.33981, 0.494786] -f_86: 8151.40322 [0.22033, 0.0567388, 0.323052, -1.06402, -2.11242, -1.06092, 1.33974, 0.494633] -f_87: 8151.40357 [0.220632, 0.0567788, 0.322528, -1.06629, -2.11265, -1.06176, 1.33921, 0.494429] -f_88: 8151.40378 [0.221899, 0.0567197, 0.323678, -1.06413, -2.11268, -1.06124, 1.33976, 0.494588] -f_89: 8151.40293 [0.219314, 0.0567806, 0.32276, -1.06408, -2.11208, -1.06046, 1.34012, 0.494394] -f_90: 8151.40262 [0.21678, 0.0569205, 0.323197, -1.06451, -2.11257, -1.06052, 1.34026, 0.494228] -f_91: 8151.40277 [0.212945, 0.0571722, 0.3245, -1.06387, -2.11531, -1.06005, 1.33995, 0.494403] -f_92: 8151.40291 [0.21517, 0.0570524, 0.323317, -1.06505, -2.11304, -1.06112, 1.34105, 0.494095] -f_93: 8151.40286 [0.214719, 0.0570045, 0.32411, -1.06452, -2.11304, -1.06009, 1.33983, 0.493364] -f_94: 8151.40256 [0.216858, 0.0569259, 0.323662, -1.06533, -2.11224, -1.06057, 1.34013, 0.49475] -f_95: 8151.40233 [0.21581, 0.0569865, 0.32359, -1.06507, -2.11286, -1.06032, 1.34006, 0.49498] -f_96: 8151.40207 [0.213479, 0.0571208, 0.323625, -1.06567, -2.11342, -1.06014, 1.3399, 0.495524] -f_97: 8151.40239 [0.209969, 0.0573482, 0.324326, -1.06736, -2.11418, -1.06054, 1.33973, 0.495827] -f_98: 8151.40231 [0.214165, 0.0571102, 0.323429, -1.06678, -2.11459, -1.0594, 1.34014, 0.495325] -f_99: 8151.40175 [0.211207, 0.0571651, 0.324385, -1.06406, -2.11271, -1.05943, 1.34026, 0.495837] -f_100: 8151.40196 [0.2081, 0.0572868, 0.32591, -1.06348, -2.11262, -1.05892, 1.34064, 0.496062] -f_101: 8151.40194 [0.209078, 0.0571148, 0.324028, -1.06445, -2.11324, -1.05922, 1.34035, 0.495707] -f_102: 8151.40146 [0.210362, 0.0571892, 0.323546, -1.06424, -2.11263, -1.05941, 1.3402, 0.495854] -f_103: 8151.40111 [0.208328, 0.0572922, 0.323131, -1.06442, -2.11223, -1.05866, 1.34022, 0.495143] -f_104: 8151.40115 [0.205928, 0.0574251, 0.323074, -1.06446, -2.11282, -1.0585, 1.34037, 0.495289] -f_105: 8151.4011 
[0.207997, 0.0572982, 0.323115, -1.06425, -2.11215, -1.0591, 1.33957, 0.494936] -f_106: 8151.40097 [0.207305, 0.0573195, 0.323289, -1.06386, -2.11121, -1.05915, 1.33991, 0.494862] -f_107: 8151.40095 [0.205623, 0.0573649, 0.324192, -1.06242, -2.10997, -1.05907, 1.34018, 0.494683] -f_108: 8151.40104 [0.204215, 0.0574516, 0.323811, -1.0644, -2.11026, -1.05933, 1.34012, 0.494539] -f_109: 8151.40093 [0.204211, 0.0573531, 0.323977, -1.06234, -2.11022, -1.0595, 1.34055, 0.494869] -f_110: 8151.40078 [0.205106, 0.0573623, 0.323485, -1.06188, -2.10952, -1.05984, 1.34042, 0.495007] -f_111: 8151.40075 [0.206356, 0.0572969, 0.322777, -1.06088, -2.10797, -1.06065, 1.34039, 0.495212] -f_112: 8151.40076 [0.206198, 0.0573224, 0.322862, -1.06125, -2.10886, -1.06045, 1.34032, 0.495185] -f_113: 8151.40061 [0.206117, 0.057282, 0.322701, -1.0603, -2.10811, -1.06029, 1.34022, 0.495247] -f_114: 8151.4005 [0.205622, 0.057283, 0.322747, -1.05985, -2.10794, -1.05988, 1.34012, 0.495453] -f_115: 8151.40048 [0.204723, 0.0572887, 0.32312, -1.05855, -2.10751, -1.0594, 1.34014, 0.495675] -f_116: 8151.40044 [0.204582, 0.0572965, 0.322707, -1.05847, -2.10735, -1.05919, 1.34043, 0.495517] -f_117: 8151.40025 [0.204174, 0.0572871, 0.322429, -1.05883, -2.107, -1.05894, 1.3402, 0.495585] -f_118: 8151.40008 [0.203058, 0.0572919, 0.321749, -1.05917, -2.10608, -1.05871, 1.33982, 0.495362] -f_119: 8151.40035 [0.204673, 0.0571722, 0.320158, -1.05925, -2.10445, -1.05889, 1.33967, 0.495413] -f_120: 8151.40031 [0.202852, 0.0572944, 0.321712, -1.05957, -2.10573, -1.05952, 1.34022, 0.495456] -f_121: 8151.39992 [0.202005, 0.0573629, 0.321335, -1.0592, -2.10641, -1.05804, 1.33961, 0.495539] -f_122: 8151.39986 [0.198106, 0.0574857, 0.321766, -1.05857, -2.10607, -1.05713, 1.33964, 0.495552] -f_123: 8151.39994 [0.198128, 0.0574939, 0.321626, -1.05825, -2.10633, -1.05726, 1.33971, 0.495953] -f_124: 8151.39983 [0.198228, 0.0574768, 0.321859, -1.0585, -2.10559, -1.05699, 1.33965, 0.495439] -f_125: 8151.39983 [0.198263, 0.0574678, 0.322212, -1.05826, -2.10527, -1.05661, 1.33973, 0.49562] -f_126: 8151.40008 [0.197382, 0.0574358, 0.321767, -1.05846, -2.10558, -1.05712, 1.33964, 0.495492] -f_127: 8151.39992 [0.198444, 0.0575262, 0.322018, -1.05837, -2.10539, -1.05713, 1.3397, 0.495415] -f_128: 8151.39985 [0.198245, 0.0574751, 0.321802, -1.05777, -2.10585, -1.05694, 1.33968, 0.495231] -f_129: 8151.39978 [0.198581, 0.0574822, 0.321491, -1.05859, -2.10563, -1.05646, 1.33969, 0.495424] -f_130: 8151.39985 [0.198709, 0.0574832, 0.321522, -1.05842, -2.10561, -1.05631, 1.33921, 0.495452] -f_131: 8151.39978 [0.197931, 0.0575049, 0.321334, -1.05822, -2.10516, -1.05629, 1.33965, 0.495407] -f_132: 8151.39978 [0.199085, 0.0574494, 0.321678, -1.05841, -2.10521, -1.05649, 1.33969, 0.495229] -f_133: 8151.3998 [0.199215, 0.0574692, 0.321785, -1.05891, -2.10609, -1.05663, 1.33975, 0.495467] -f_134: 8151.39979 [0.19902, 0.0574584, 0.321444, -1.05807, -2.10516, -1.05656, 1.33969, 0.495509] -f_135: 8151.3998 [0.199083, 0.0574795, 0.321698, -1.0591, -2.10604, -1.05672, 1.33973, 0.495346] -f_136: 8151.39977 [0.197648, 0.0575089, 0.321553, -1.05856, -2.1056, -1.05612, 1.33972, 0.495319] -f_137: 8151.39979 [0.196938, 0.057546, 0.321322, -1.0585, -2.10561, -1.05598, 1.33975, 0.495479] -f_138: 8151.39977 [0.197933, 0.0574944, 0.321558, -1.05854, -2.10535, -1.0562, 1.33968, 0.49523] -f_139: 8151.39977 [0.198156, 0.0574865, 0.321526, -1.05867, -2.10536, -1.05629, 1.33967, 0.495211] -f_140: 8151.39977 [0.198183, 0.0574865, 0.321336, -1.05906, -2.10525, -1.05637, 1.33965, 
0.495123] -f_141: 8151.39976 [0.198381, 0.0574756, 0.321478, -1.05869, -2.10533, -1.05638, 1.33967, 0.495218] -f_142: 8151.39976 [0.198521, 0.0574666, 0.321407, -1.0588, -2.10535, -1.05642, 1.33964, 0.495158] -f_143: 8151.39976 [0.198441, 0.0574689, 0.32139, -1.05881, -2.10532, -1.05639, 1.33963, 0.495153] -f_144: 8151.39976 [0.198421, 0.0574676, 0.321384, -1.05882, -2.10526, -1.05637, 1.33964, 0.495186] -f_145: 8151.39976 [0.198387, 0.057467, 0.321343, -1.05881, -2.10513, -1.05632, 1.33964, 0.49523] -f_146: 8151.39975 [0.198313, 0.0574654, 0.321223, -1.05882, -2.10487, -1.05621, 1.33964, 0.495268] -f_147: 8151.39975 [0.197982, 0.0574693, 0.320987, -1.0588, -2.10446, -1.05598, 1.33962, 0.495262] -f_148: 8151.39976 [0.198343, 0.0574653, 0.321326, -1.0588, -2.10475, -1.05621, 1.33964, 0.495174] -f_149: 8151.39975 [0.198315, 0.0574626, 0.321082, -1.05895, -2.10476, -1.05615, 1.33963, 0.495341] -f_150: 8151.39976 [0.198324, 0.0574708, 0.321293, -1.05878, -2.10492, -1.05629, 1.33964, 0.495281] -f_151: 8151.39975 [0.198201, 0.0574646, 0.321207, -1.05886, -2.10492, -1.0561, 1.33964, 0.495287] -f_152: 8151.39975 [0.197853, 0.0574753, 0.321153, -1.0589, -2.10493, -1.05595, 1.33965, 0.495302] -f_153: 8151.39975 [0.198197, 0.0574636, 0.321154, -1.05896, -2.10484, -1.05603, 1.33964, 0.495292] -f_154: 8151.39975 [0.198159, 0.0574628, 0.321144, -1.05881, -2.10483, -1.05599, 1.33962, 0.495286] -f_155: 8151.39974 [0.198195, 0.0574584, 0.321067, -1.05863, -2.10474, -1.05585, 1.33957, 0.495297] -f_156: 8151.39974 [0.198118, 0.0574497, 0.320907, -1.05831, -2.1044, -1.0556, 1.33952, 0.495297] -f_157: 8151.39976 [0.198001, 0.0574524, 0.321048, -1.05867, -2.10476, -1.05589, 1.3395, 0.495274] -f_158: 8151.39974 [0.198213, 0.0574637, 0.320918, -1.05868, -2.10472, -1.05569, 1.33961, 0.49529] -f_159: 8151.39974 [0.198093, 0.057466, 0.320824, -1.05867, -2.10464, -1.05557, 1.3396, 0.495287] -f_160: 8151.39974 [0.198367, 0.0574632, 0.32094, -1.05882, -2.10466, -1.05564, 1.33954, 0.495286] -f_161: 8151.39973 [0.198205, 0.0574647, 0.320849, -1.0586, -2.1048, -1.05573, 1.33965, 0.495278] -f_162: 8151.39973 [0.198311, 0.057461, 0.320707, -1.05857, -2.10474, -1.05575, 1.33962, 0.495232] -f_163: 8151.39973 [0.198154, 0.0574629, 0.32078, -1.05862, -2.10471, -1.05567, 1.33969, 0.49526] -f_164: 8151.39973 [0.198104, 0.0574693, 0.320809, -1.0586, -2.10489, -1.05571, 1.3397, 0.495268] -f_165: 8151.39973 [0.19785, 0.0574833, 0.320755, -1.05863, -2.10506, -1.05565, 1.33973, 0.495269] -f_166: 8151.39973 [0.198094, 0.057467, 0.320738, -1.05854, -2.10484, -1.05562, 1.33971, 0.495287] -f_167: 8151.39973 [0.198058, 0.0574681, 0.320705, -1.05841, -2.10492, -1.05561, 1.33973, 0.495313] -f_168: 8151.39973 [0.198125, 0.0574682, 0.32064, -1.05854, -2.10496, -1.05558, 1.33974, 0.495319] -f_169: 8151.39973 [0.198179, 0.05747, 0.320604, -1.0587, -2.10509, -1.0556, 1.33975, 0.495333] -f_170: 8151.39973 [0.198159, 0.0574694, 0.320603, -1.05865, -2.10505, -1.05558, 1.33975, 0.495325] -f_171: 8151.39973 [0.198156, 0.057469, 0.320576, -1.05866, -2.10506, -1.05556, 1.33975, 0.495312] -f_172: 8151.39973 [0.198117, 0.0574695, 0.320537, -1.05868, -2.10505, -1.05551, 1.33975, 0.495303] -f_173: 8151.39973 [0.198213, 0.0574664, 0.320592, -1.05863, -2.10511, -1.05556, 1.33975, 0.495298] -f_174: 8151.39973 [0.19829, 0.0574628, 0.320652, -1.05862, -2.10524, -1.05557, 1.33975, 0.495303] -f_175: 8151.39973 [0.198369, 0.0574648, 0.320753, -1.0587, -2.10552, -1.05562, 1.33977, 0.495347] -f_176: 8151.39972 [0.198326, 0.0574586, 0.320584, -1.05866, -2.10525, 
-1.05549, 1.33976, 0.495292] -f_177: 8151.39972 [0.198505, 0.0574534, 0.320608, -1.05873, -2.10526, -1.05553, 1.33974, 0.4953] -f_178: 8151.39972 [0.198869, 0.0574432, 0.320569, -1.05887, -2.10535, -1.05559, 1.33971, 0.495303] -f_179: 8151.39972 [0.198992, 0.0574377, 0.320544, -1.05894, -2.10537, -1.0555, 1.33971, 0.495318] -f_180: 8151.39972 [0.198946, 0.0574396, 0.320566, -1.0589, -2.10537, -1.05553, 1.33971, 0.495315] -f_181: 8151.39972 [0.199023, 0.057436, 0.320593, -1.05877, -2.10536, -1.05553, 1.33971, 0.495335] -f_182: 8151.39972 [0.198997, 0.0574374, 0.320616, -1.05879, -2.10538, -1.05553, 1.33971, 0.495325] -f_183: 8151.39972 [0.199028, 0.057435, 0.320662, -1.05878, -2.10536, -1.05555, 1.3397, 0.495311] -f_184: 8151.39972 [0.199062, 0.0574328, 0.320669, -1.05878, -2.10534, -1.05555, 1.3397, 0.495307] -f_185: 8151.39972 [0.19898, 0.0574293, 0.320661, -1.05879, -2.10536, -1.05557, 1.33969, 0.495321] -f_186: 8151.39972 [0.199112, 0.0574294, 0.320654, -1.05881, -2.10536, -1.05554, 1.33969, 0.495308] -f_187: 8151.39972 [0.199112, 0.0574284, 0.320652, -1.05881, -2.1054, -1.05555, 1.33974, 0.495299] -f_188: 8151.39972 [0.199136, 0.0574295, 0.320653, -1.05885, -2.10541, -1.05555, 1.33971, 0.495312] -f_189: 8151.39972 [0.199101, 0.0574291, 0.320643, -1.05879, -2.10538, -1.05552, 1.33971, 0.495303] -f_190: 8151.39972 [0.199083, 0.0574289, 0.320653, -1.05883, -2.10536, -1.05549, 1.33973, 0.495336] -f_191: 8151.39972 [0.199078, 0.0574291, 0.320646, -1.05878, -2.10537, -1.05549, 1.33971, 0.49531] -f_192: 8151.39972 [0.19915, 0.0574275, 0.320619, -1.05878, -2.10541, -1.05547, 1.33973, 0.495313] -f_193: 8151.39972 [0.199023, 0.0574317, 0.320669, -1.05878, -2.10531, -1.0555, 1.3397, 0.495308] -f_194: 8151.39972 [0.199142, 0.0574264, 0.320641, -1.05879, -2.10542, -1.05547, 1.33972, 0.495317] -f_195: 8151.39972 [0.19909, 0.0574288, 0.32066, -1.05879, -2.10538, -1.0555, 1.33971, 0.49531] -f_196: 8151.39972 [0.19909, 0.0574292, 0.320675, -1.05879, -2.1054, -1.0555, 1.33971, 0.495311] -f_197: 8151.39972 [0.199089, 0.0574294, 0.320678, -1.05879, -2.1054, -1.0555, 1.33971, 0.495311] - 1.675951 seconds (427.61 k allocations: 30.332 MiB, 0.43% gc time) +f_1: 8151.58334 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_2: 8648.50534 [0.189521, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_3: 8151.59709 [0.0543791, -1.00786, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_4: 8151.71186 [0.0543791, -1.01344, -0.952756, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_5: 8151.75117 [0.0543791, -1.01344, -1.0165, 0.389933, -2.0218, 0.208273, 1.33956, 0.496833] +f_6: 8152.01341 [0.0543791, -1.01344, -1.0165, 0.304089, -1.93539, 0.208273, 1.33956, 0.496833] +f_7: 8151.93507 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.278569, 1.33956, 0.496833] +f_8: 8152.13673 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.38956, 0.496833] +f_9: 8151.94237 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.546833] +f_10: 8650.53785 [-0.0807627, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_11: 8151.57191 [0.0543791, -1.01903, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_12: 8151.67196 [0.0543791, -1.01344, -1.08024, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833] +f_13: 8151.8632 [0.0543791, -1.01344, -1.0165, 0.218245, -2.0218, 0.208273, 1.33956, 0.496833] +f_14: 8151.54498 [0.0543791, -1.01344, -1.0165, 0.304089, -2.10821, 0.208273, 1.33956, 0.496833] +f_15: 8151.81772 
[0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.137977, 1.33956, 0.496833] +f_16: 8152.10285 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.28956, 0.496833] +f_17: 8152.03137 [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.446833] +f_18: 8151.50037 [0.0545782, -1.01504, -1.0264, 0.313121, -2.07246, 0.203182, 1.33926, 0.500674] +f_19: 8151.49983 [0.054508, -1.02, -1.0132, 0.312539, -2.0735, 0.204669, 1.33929, 0.497885] +f_20: 8151.50032 [0.054517, -1.02018, -1.02354, 0.314584, -2.07166, 0.201517, 1.33882, 0.499721] +f_21: 8151.50444 [0.0545195, -1.02059, -1.0129, 0.313297, -2.0859, 0.204346, 1.33927, 0.49798] +f_22: 8151.49937 [0.0545436, -1.02093, -1.01601, 0.313368, -2.07208, 0.202343, 1.33887, 0.499223] +f_23: 8151.50045 [0.0545177, -1.01994, -1.01714, 0.313572, -2.07284, 0.202423, 1.33883, 0.498984] +f_24: 8151.50381 [0.0545532, -1.02136, -1.01598, 0.308902, -2.07501, 0.201997, 1.33883, 0.499362] +f_25: 8151.49031 [0.0545123, -1.02098, -1.01699, 0.31816, -2.07144, 0.207295, 1.34, 0.49761] +f_26: 8151.48235 [0.0545189, -1.02186, -1.02069, 0.324514, -2.07139, 0.212551, 1.34059, 0.49643] +f_27: 8151.48046 [0.0545088, -1.02404, -1.02192, 0.329026, -2.07094, 0.216693, 1.34136, 0.495824] +f_28: 8151.49589 [0.0544553, -1.02569, -1.0114, 0.319052, -2.07151, 0.229, 1.34085, 0.493857] +f_29: 8151.48049 [0.0544842, -1.02401, -1.03039, 0.325883, -2.07585, 0.219114, 1.3435, 0.489754] +f_30: 8151.47 [0.0545297, -1.02415, -1.0335, 0.323453, -2.07385, 0.21891, 1.34163, 0.494977] +f_31: 8151.4656 [0.0545298, -1.02607, -1.04547, 0.323077, -2.07396, 0.223992, 1.3416, 0.496608] +f_32: 8151.46346 [0.054515, -1.02725, -1.04366, 0.323062, -2.07649, 0.224199, 1.3424, 0.4963] +f_33: 8151.46064 [0.0545172, -1.0296, -1.0498, 0.324585, -2.07866, 0.22663, 1.34282, 0.495188] +f_34: 8151.45792 [0.0545172, -1.03425, -1.0543, 0.328765, -2.07886, 0.231375, 1.34301, 0.494065] +f_35: 8151.46191 [0.0545405, -1.03411, -1.06293, 0.331515, -2.08506, 0.234, 1.34498, 0.494754] +f_36: 8151.45368 [0.0544765, -1.03428, -1.05227, 0.331443, -2.08572, 0.230015, 1.34124, 0.493651] +f_37: 8151.45047 [0.0545181, -1.03465, -1.05513, 0.331025, -2.08911, 0.230889, 1.3401, 0.493283] +f_38: 8151.45792 [0.0545193, -1.03499, -1.0651, 0.330483, -2.08697, 0.233031, 1.33652, 0.491913] +f_39: 8151.45431 [0.0546115, -1.03466, -1.05458, 0.331301, -2.08906, 0.23223, 1.34151, 0.488605] +f_40: 8151.45292 [0.0544794, -1.03469, -1.05531, 0.323467, -2.09262, 0.229783, 1.34089, 0.492875] +f_41: 8151.87963 [0.0512051, -1.03517, -1.05608, 0.331686, -2.09076, 0.231444, 1.34012, 0.493196] +f_42: 8151.43234 [0.0551766, -1.03462, -1.05497, 0.329654, -2.0968, 0.232664, 1.33893, 0.494489] +f_43: 8151.43565 [0.0552215, -1.0345, -1.05509, 0.329481, -2.11109, 0.240377, 1.33829, 0.494463] +f_44: 8155.30807 [0.0432737, -1.03448, -1.05501, 0.328253, -2.0956, 0.234593, 1.338, 0.494402] +f_45: 8151.43826 [0.0552043, -1.03449, -1.05376, 0.326916, -2.09543, 0.23813, 1.33689, 0.49494] +f_46: 8151.45587 [0.0559145, -1.03484, -1.05396, 0.329706, -2.09598, 0.235786, 1.34204, 0.496884] +f_47: 8151.43165 [0.0551981, -1.03433, -1.05689, 0.3307, -2.10156, 0.229931, 1.33696, 0.494296] +f_48: 8151.43112 [0.0552183, -1.03484, -1.05788, 0.331358, -2.10354, 0.230467, 1.33689, 0.49547] +f_49: 8151.43231 [0.0552073, -1.0349, -1.05349, 0.330751, -2.1076, 0.233586, 1.33741, 0.494069] +f_50: 8151.45399 [0.0544731, -1.03496, -1.06045, 0.326005, -2.10231, 0.232486, 1.33761, 0.492848] +f_51: 8151.43594 [0.0552048, -1.03468, -1.05685, 0.335718, -2.10875, 
0.23221, 1.33744, 0.497679] +f_52: 8151.43091 [0.0552398, -1.03539, -1.05894, 0.332055, -2.10342, 0.231034, 1.33682, 0.495435] +f_53: 8151.43914 [0.0552268, -1.03543, -1.0567, 0.332696, -2.10198, 0.229357, 1.33354, 0.498386] +f_54: 8151.4329 [0.0552282, -1.03536, -1.05944, 0.335579, -2.10401, 0.229742, 1.33769, 0.495134] +f_55: 8151.43083 [0.0552882, -1.03548, -1.05867, 0.332092, -2.10469, 0.230828, 1.33626, 0.495482] +f_56: 8151.43055 [0.0552365, -1.03549, -1.05928, 0.331588, -2.10584, 0.231298, 1.33686, 0.49603] +f_57: 8151.43198 [0.0552411, -1.03552, -1.05942, 0.332221, -2.10659, 0.232199, 1.33602, 0.49578] +f_58: 8151.42966 [0.0557064, -1.03553, -1.05945, 0.331719, -2.10625, 0.231169, 1.33682, 0.496094] +f_59: 8151.42851 [0.0554528, -1.03557, -1.05986, 0.331697, -2.10594, 0.231074, 1.33687, 0.496066] +f_60: 8151.42839 [0.0555138, -1.0356, -1.05953, 0.33188, -2.10607, 0.2312, 1.33715, 0.496169] +f_61: 8151.46056 [0.0544336, -1.0356, -1.05938, 0.331753, -2.1064, 0.230981, 1.33709, 0.496243] +f_62: 8151.4277 [0.0555266, -1.0356, -1.05998, 0.33156, -2.10635, 0.230942, 1.3373, 0.496239] +f_63: 8151.42648 [0.0554859, -1.0356, -1.06048, 0.330806, -2.10719, 0.230841, 1.33784, 0.495883] +f_64: 8151.42472 [0.0555437, -1.03561, -1.06169, 0.329449, -2.10909, 0.231155, 1.33892, 0.495714] +f_65: 8151.42438 [0.0555745, -1.03563, -1.06189, 0.329677, -2.11233, 0.230282, 1.33909, 0.495661] +f_66: 8151.42423 [0.0555857, -1.03575, -1.06329, 0.329038, -2.11139, 0.231788, 1.33938, 0.495889] +f_67: 8151.42468 [0.0555866, -1.03565, -1.06386, 0.328694, -2.11339, 0.231923, 1.34035, 0.49668] +f_68: 8151.52036 [0.0575381, -1.03578, -1.06311, 0.329412, -2.11122, 0.231281, 1.33981, 0.496044] +f_69: 8151.42417 [0.0556593, -1.03579, -1.06285, 0.329495, -2.11129, 0.231168, 1.34004, 0.496034] +f_70: 8151.42392 [0.0556741, -1.03583, -1.06311, 0.329496, -2.11129, 0.230856, 1.3401, 0.496087] +f_71: 8151.42344 [0.0556728, -1.03584, -1.06307, 0.329007, -2.11147, 0.23055, 1.33986, 0.496316] +f_72: 8151.42329 [0.0556642, -1.03584, -1.06336, 0.329126, -2.11155, 0.230107, 1.33959, 0.496462] +f_73: 8151.4226 [0.0556962, -1.03584, -1.06341, 0.328534, -2.11161, 0.229798, 1.3396, 0.496186] +f_74: 8151.42214 [0.0556788, -1.03586, -1.06337, 0.32789, -2.11175, 0.229521, 1.33956, 0.495996] +f_75: 8151.42127 [0.0557001, -1.03589, -1.06384, 0.326706, -2.11175, 0.229022, 1.33996, 0.495754] +f_76: 8151.42134 [0.0557081, -1.03591, -1.06517, 0.325026, -2.1119, 0.229019, 1.34133, 0.495869] +f_77: 8151.42073 [0.0558933, -1.03594, -1.06329, 0.326988, -2.1118, 0.228083, 1.34016, 0.495953] +f_78: 8151.42066 [0.0557229, -1.03602, -1.06298, 0.326817, -2.11189, 0.227221, 1.34015, 0.495874] +f_79: 8151.42065 [0.0557173, -1.03606, -1.06324, 0.326733, -2.11236, 0.226971, 1.3401, 0.495844] +f_80: 8151.42043 [0.0557694, -1.03603, -1.06345, 0.326995, -2.11267, 0.226739, 1.34033, 0.495984] +f_81: 8151.42058 [0.0557613, -1.03603, -1.06325, 0.326651, -2.11314, 0.227038, 1.34038, 0.496236] +f_82: 8151.42705 [0.0564142, -1.03603, -1.06342, 0.327487, -2.11295, 0.226829, 1.34005, 0.495978] +f_83: 8151.42031 [0.0558631, -1.03603, -1.06336, 0.327669, -2.11298, 0.226966, 1.34017, 0.495925] +f_84: 8151.42008 [0.0558859, -1.03607, -1.0637, 0.327563, -2.11316, 0.226736, 1.34026, 0.495989] +f_85: 8151.42002 [0.0558764, -1.03607, -1.06381, 0.327599, -2.11298, 0.226551, 1.33994, 0.49632] +f_86: 8151.42342 [0.0555816, -1.03608, -1.06348, 0.327662, -2.1135, 0.22619, 1.33985, 0.496281] +f_87: 8151.41968 [0.0559332, -1.03608, -1.06386, 0.327037, -2.11334, 0.226742, 1.3398, 
0.496076] +f_88: 8151.4193 [0.0559456, -1.0361, -1.06313, 0.325843, -2.11297, 0.226822, 1.34009, 0.496043] +f_89: 8151.41798 [0.056063, -1.03619, -1.06228, 0.324284, -2.11197, 0.225275, 1.34006, 0.495278] +f_90: 8151.41955 [0.0560946, -1.03629, -1.06073, 0.325271, -2.11128, 0.225441, 1.34072, 0.494381] +f_91: 8151.41829 [0.0560013, -1.03613, -1.06187, 0.324904, -2.11124, 0.225576, 1.33958, 0.495508] +f_92: 8151.41725 [0.0560237, -1.03622, -1.06226, 0.324119, -2.11132, 0.224283, 1.34011, 0.494755] +f_93: 8151.4173 [0.0560095, -1.03628, -1.06132, 0.323768, -2.1114, 0.224189, 1.34015, 0.494377] +f_94: 8151.42557 [0.0554622, -1.0362, -1.0622, 0.32363, -2.11109, 0.224386, 1.34034, 0.494731] +f_95: 8151.41725 [0.0560035, -1.03625, -1.06222, 0.324521, -2.11115, 0.224055, 1.34022, 0.494983] +f_96: 8151.41741 [0.0561186, -1.03628, -1.06234, 0.32422, -2.11065, 0.224209, 1.33995, 0.495029] +f_97: 8151.41726 [0.0560459, -1.03628, -1.06243, 0.324698, -2.11129, 0.22406, 1.34006, 0.494657] +f_98: 8151.41692 [0.0560307, -1.03629, -1.06224, 0.324193, -2.11108, 0.223659, 1.34029, 0.49494] +f_99: 8151.41676 [0.0560885, -1.03632, -1.06214, 0.323753, -2.11096, 0.223363, 1.34046, 0.494736] +f_100: 8151.41636 [0.0560858, -1.03633, -1.06196, 0.323665, -2.11103, 0.222935, 1.34014, 0.494851] +f_101: 8151.41587 [0.0561461, -1.03642, -1.06179, 0.323335, -2.11141, 0.222202, 1.33993, 0.494659] +f_102: 8151.41615 [0.056164, -1.03655, -1.06077, 0.323981, -2.1098, 0.221461, 1.34075, 0.49452] +f_103: 8151.48685 [0.0545185, -1.03641, -1.06158, 0.322679, -2.11144, 0.22187, 1.34031, 0.495063] +f_104: 8151.41544 [0.0561525, -1.03642, -1.0615, 0.322023, -2.1117, 0.221586, 1.34031, 0.494626] +f_105: 8151.41491 [0.0562262, -1.03648, -1.06144, 0.321169, -2.11111, 0.220881, 1.34004, 0.494419] +f_106: 8151.41437 [0.0562815, -1.03654, -1.0603, 0.319425, -2.11078, 0.219262, 1.33991, 0.493741] +f_107: 8151.4149 [0.0563288, -1.0367, -1.05885, 0.318641, -2.11044, 0.219879, 1.33992, 0.49421] +f_108: 8151.41486 [0.0560723, -1.03654, -1.05998, 0.32015, -2.11031, 0.218575, 1.33954, 0.494285] +f_109: 8151.41391 [0.0562802, -1.03656, -1.06011, 0.319748, -2.10972, 0.21824, 1.33995, 0.493773] +f_110: 8151.41378 [0.056297, -1.0366, -1.05933, 0.319327, -2.10964, 0.217451, 1.33972, 0.493485] +f_111: 8151.41399 [0.0562331, -1.03655, -1.05881, 0.318412, -2.1086, 0.217672, 1.33966, 0.493544] +f_112: 8151.42906 [0.0570588, -1.03661, -1.05962, 0.318999, -2.10956, 0.217435, 1.33948, 0.493507] +f_113: 8151.41361 [0.0562857, -1.03662, -1.05965, 0.318908, -2.10963, 0.217353, 1.33942, 0.493537] +f_114: 8151.41363 [0.0563045, -1.0366, -1.05978, 0.318282, -2.10973, 0.217122, 1.33928, 0.493356] +f_115: 8151.41426 [0.0561663, -1.03664, -1.05983, 0.318624, -2.10948, 0.217447, 1.33977, 0.493446] +f_116: 8151.4138 [0.056308, -1.03664, -1.05942, 0.318636, -2.10937, 0.217414, 1.33923, 0.493239] +f_117: 8151.41354 [0.0562762, -1.0366, -1.05983, 0.318967, -2.10945, 0.216797, 1.33931, 0.493389] +f_118: 8151.4131 [0.0563303, -1.03662, -1.05998, 0.318764, -2.10924, 0.216421, 1.33946, 0.49368] +f_119: 8151.41302 [0.0562673, -1.03665, -1.06058, 0.318238, -2.10895, 0.215753, 1.33978, 0.494137] +f_120: 8151.41229 [0.0563871, -1.03672, -1.05997, 0.318098, -2.1089, 0.214878, 1.33953, 0.494105] +f_121: 8151.4117 [0.0564758, -1.03684, -1.05871, 0.318796, -2.10933, 0.213101, 1.33941, 0.494194] +f_122: 8151.41153 [0.0564932, -1.03689, -1.0583, 0.319119, -2.10813, 0.211011, 1.33896, 0.493345] +f_123: 8151.41133 [0.0565833, -1.03699, -1.05719, 0.317024, -2.10771, 0.209756, 1.33863, 
0.49342] +f_124: 8151.4104 [0.0566871, -1.03709, -1.05881, 0.317071, -2.10853, 0.208503, 1.33932, 0.493672] +f_125: 8151.40958 [0.0567574, -1.03723, -1.05924, 0.318809, -2.10918, 0.207438, 1.33968, 0.494355] +f_126: 8151.40907 [0.0568832, -1.03754, -1.06002, 0.321422, -2.11169, 0.204906, 1.34026, 0.494674] +f_127: 8151.40817 [0.0571325, -1.03781, -1.06046, 0.322034, -2.11042, 0.200733, 1.34057, 0.495421] +f_128: 8151.40772 [0.0572732, -1.03803, -1.0589, 0.322074, -2.11004, 0.197259, 1.34038, 0.494812] +f_129: 8151.40793 [0.057342, -1.03813, -1.05892, 0.322816, -2.11044, 0.196315, 1.34063, 0.495083] +f_130: 8151.62671 [0.0600344, -1.03802, -1.05884, 0.321296, -2.11045, 0.197076, 1.34091, 0.494785] +f_131: 8151.40736 [0.0571884, -1.03806, -1.05916, 0.322769, -2.10964, 0.197477, 1.33925, 0.494979] +f_132: 8151.41171 [0.0575427, -1.03807, -1.05904, 0.323141, -2.10912, 0.197701, 1.3394, 0.494819] +f_133: 8151.4074 [0.0571629, -1.03802, -1.05915, 0.322866, -2.10994, 0.197018, 1.33934, 0.494985] +f_134: 8151.40731 [0.0571779, -1.03806, -1.05966, 0.32269, -2.10958, 0.197363, 1.33928, 0.494695] +f_135: 8151.40725 [0.0572115, -1.03807, -1.05966, 0.322025, -2.11001, 0.197252, 1.33939, 0.494828] +f_136: 8151.40751 [0.0570767, -1.03806, -1.05977, 0.321831, -2.10936, 0.197013, 1.33943, 0.495063] +f_137: 8151.40711 [0.0572025, -1.03807, -1.05961, 0.321806, -2.10966, 0.196888, 1.33959, 0.495109] +f_138: 8151.40698 [0.0572335, -1.03811, -1.05921, 0.320853, -2.10935, 0.196065, 1.3399, 0.495222] +f_139: 8151.40699 [0.0572984, -1.03817, -1.05807, 0.318895, -2.10877, 0.194378, 1.3401, 0.495165] +f_140: 8151.41169 [0.0576263, -1.0382, -1.0591, 0.320742, -2.10901, 0.196797, 1.33998, 0.49536] +f_141: 8151.4069 [0.05727, -1.03814, -1.05895, 0.320282, -2.10912, 0.194842, 1.33994, 0.495215] +f_142: 8151.40674 [0.0573619, -1.03827, -1.05885, 0.320452, -2.10896, 0.192601, 1.34006, 0.49527] +f_143: 8151.4066 [0.0575386, -1.03856, -1.0599, 0.322492, -2.10917, 0.188817, 1.33981, 0.495251] +f_144: 8151.40665 [0.0575904, -1.03859, -1.05903, 0.320773, -2.10877, 0.187293, 1.34019, 0.495207] +f_145: 8151.40765 [0.0574015, -1.03853, -1.06164, 0.321666, -2.10845, 0.188601, 1.34024, 0.49584] +f_146: 8151.40661 [0.057589, -1.03862, -1.05975, 0.321385, -2.10876, 0.187637, 1.34024, 0.495762] +f_147: 8151.40674 [0.0575377, -1.03855, -1.06006, 0.32191, -2.10972, 0.188662, 1.33915, 0.495433] +f_148: 8151.40658 [0.0575448, -1.03856, -1.05979, 0.322005, -2.10866, 0.188059, 1.33931, 0.49538] +f_149: 8151.40752 [0.0573726, -1.03859, -1.05969, 0.321926, -2.10835, 0.188432, 1.33917, 0.495451] +f_150: 8151.40655 [0.057563, -1.03858, -1.05989, 0.32173, -2.10902, 0.188179, 1.33964, 0.495431] +f_151: 8151.40661 [0.0575638, -1.0386, -1.06001, 0.32248, -2.1092, 0.188177, 1.33959, 0.495551] +f_152: 8151.45356 [0.0589027, -1.03859, -1.05989, 0.321721, -2.10895, 0.188237, 1.33962, 0.49543] +f_153: 8151.40643 [0.0575692, -1.03858, -1.05979, 0.321417, -2.10853, 0.188614, 1.33946, 0.495415] +f_154: 8151.40657 [0.0575383, -1.03854, -1.05904, 0.321323, -2.10864, 0.189095, 1.33891, 0.495092] +f_155: 8151.40645 [0.0575676, -1.03855, -1.05974, 0.321525, -2.10857, 0.189182, 1.33946, 0.495378] +f_156: 8151.40646 [0.0575754, -1.03858, -1.06022, 0.321916, -2.10855, 0.188667, 1.33926, 0.495335] +f_157: 8151.40711 [0.0577117, -1.03859, -1.05957, 0.321193, -2.10863, 0.189045, 1.33975, 0.495503] +f_158: 8151.40643 [0.0575682, -1.03856, -1.05971, 0.320803, -2.10822, 0.188379, 1.33941, 0.495574] +f_159: 8151.40641 [0.057573, -1.03857, -1.05974, 0.321202, -2.10832, 
0.188385, 1.33948, 0.495472] +f_160: 8151.40636 [0.0575827, -1.0386, -1.05946, 0.320855, -2.10786, 0.188113, 1.33954, 0.495533] +f_161: 8151.40636 [0.0576125, -1.03864, -1.05914, 0.320138, -2.10746, 0.187101, 1.33963, 0.495392] +f_162: 8151.40649 [0.0576417, -1.0386, -1.05941, 0.320337, -2.10836, 0.188109, 1.3394, 0.495496] +f_163: 8151.40642 [0.0575837, -1.03859, -1.05906, 0.320529, -2.10802, 0.188013, 1.3398, 0.495696] +f_164: 8151.40636 [0.0576008, -1.0386, -1.05924, 0.320774, -2.1078, 0.188064, 1.33947, 0.495502] +f_165: 8151.40636 [0.0575887, -1.03861, -1.05932, 0.320704, -2.10787, 0.187979, 1.33961, 0.495384] +f_166: 8151.40694 [0.0574412, -1.03861, -1.05926, 0.32059, -2.10775, 0.18795, 1.3396, 0.495379] +f_167: 8151.40636 [0.0575887, -1.03861, -1.05931, 0.32064, -2.10785, 0.187928, 1.33956, 0.495485] +f_168: 8151.40635 [0.0575887, -1.03861, -1.0593, 0.320673, -2.10783, 0.187995, 1.33958, 0.495367] +f_169: 8151.40634 [0.0575852, -1.03861, -1.05929, 0.320654, -2.10776, 0.188017, 1.33958, 0.49537] +f_170: 8151.40632 [0.0575869, -1.03861, -1.05927, 0.3206, -2.10763, 0.188074, 1.33959, 0.495387] +f_171: 8151.4063 [0.0575838, -1.03862, -1.05924, 0.320448, -2.10736, 0.188128, 1.33962, 0.495444] +f_172: 8151.40629 [0.0575869, -1.03862, -1.05911, 0.319978, -2.10698, 0.18803, 1.33971, 0.495549] +f_173: 8151.40632 [0.0576007, -1.03864, -1.05891, 0.31986, -2.1072, 0.187765, 1.33979, 0.495373] +f_174: 8151.40742 [0.0573775, -1.03863, -1.05923, 0.319999, -2.10698, 0.188027, 1.33983, 0.495579] +f_175: 8151.40629 [0.0575822, -1.03863, -1.05923, 0.320093, -2.10692, 0.18805, 1.33984, 0.495594] +f_176: 8151.40661 [0.0574673, -1.03863, -1.05914, 0.320151, -2.10688, 0.188047, 1.33987, 0.495624] +f_177: 8151.40628 [0.0575768, -1.03863, -1.05919, 0.320162, -2.10692, 0.188086, 1.33979, 0.495656] +f_178: 8151.40628 [0.0575741, -1.03862, -1.05915, 0.320249, -2.10687, 0.188128, 1.33978, 0.49564] +f_179: 8151.40628 [0.0575692, -1.03862, -1.05908, 0.320304, -2.10691, 0.188177, 1.33979, 0.495657] +f_180: 8151.40627 [0.0575692, -1.03863, -1.05914, 0.320215, -2.10681, 0.188096, 1.33978, 0.495644] +f_181: 8151.40627 [0.0575733, -1.03863, -1.05914, 0.320266, -2.10677, 0.188059, 1.33976, 0.495629] +f_182: 8151.40625 [0.057571, -1.03863, -1.05915, 0.320351, -2.10667, 0.188035, 1.33974, 0.495589] +f_183: 8151.40622 [0.0575741, -1.03864, -1.05915, 0.320467, -2.10652, 0.188084, 1.33968, 0.495492] +f_184: 8151.40619 [0.0575695, -1.03865, -1.05929, 0.320765, -2.10617, 0.187931, 1.33944, 0.495443] +f_185: 8151.40619 [0.0575695, -1.03867, -1.05942, 0.320759, -2.10594, 0.188178, 1.33918, 0.49534] +f_186: 8151.40621 [0.0575797, -1.03867, -1.0594, 0.320767, -2.10577, 0.188251, 1.33919, 0.495504] +f_187: 8151.40618 [0.0575708, -1.03867, -1.05926, 0.321004, -2.10591, 0.188136, 1.33923, 0.495348] +f_188: 8151.40617 [0.0575732, -1.03868, -1.05934, 0.32104, -2.10582, 0.187935, 1.33934, 0.495326] +f_189: 8151.40616 [0.0575754, -1.03869, -1.0592, 0.321049, -2.10585, 0.187797, 1.33941, 0.495358] +f_190: 8151.40616 [0.0575808, -1.03868, -1.0592, 0.321171, -2.10577, 0.187642, 1.33954, 0.495311] +f_191: 8151.40757 [0.0573402, -1.03869, -1.05921, 0.321017, -2.10586, 0.187746, 1.33939, 0.49537] +f_192: 8151.40616 [0.0575679, -1.03869, -1.05927, 0.321096, -2.10595, 0.187799, 1.33944, 0.495396] +f_193: 8151.40616 [0.0575684, -1.03869, -1.05928, 0.321103, -2.10602, 0.187795, 1.33946, 0.495405] +f_194: 8151.40616 [0.0575714, -1.03869, -1.05928, 0.321039, -2.1059, 0.187784, 1.33946, 0.495403] +f_195: 8151.40616 [0.0575705, -1.03869, -1.05927, 
0.321052, -2.10586, 0.187739, 1.33944, 0.495396] +f_196: 8151.40615 [0.0575739, -1.0387, -1.05925, 0.321051, -2.10583, 0.187764, 1.33947, 0.49537] +f_197: 8151.40614 [0.0575685, -1.0387, -1.05922, 0.321075, -2.10571, 0.187801, 1.33952, 0.495354] +f_198: 8151.40614 [0.0575687, -1.0387, -1.05921, 0.321136, -2.10543, 0.187832, 1.33962, 0.495364] +f_199: 8151.40614 [0.057573, -1.0387, -1.0592, 0.321038, -2.10535, 0.187764, 1.33959, 0.495341] +f_200: 8151.40613 [0.0575662, -1.03871, -1.05922, 0.321029, -2.10536, 0.18779, 1.33962, 0.495267] +f_201: 8151.40613 [0.057571, -1.03871, -1.05921, 0.321097, -2.10532, 0.187685, 1.33961, 0.495272] +f_202: 8151.40613 [0.0575647, -1.03871, -1.05922, 0.321058, -2.1053, 0.187813, 1.33962, 0.495263] +f_203: 8151.40612 [0.0575646, -1.03871, -1.05923, 0.321043, -2.10524, 0.187852, 1.3396, 0.49527] +f_204: 8151.40612 [0.0575608, -1.03871, -1.05919, 0.321065, -2.1052, 0.187886, 1.3396, 0.495258] +f_205: 8151.40612 [0.0575605, -1.03871, -1.05921, 0.32106, -2.10514, 0.187913, 1.33962, 0.495262] +f_206: 8151.40623 [0.0576134, -1.03871, -1.05919, 0.321113, -2.1052, 0.187902, 1.3396, 0.495288] +f_207: 8151.40612 [0.0575537, -1.0387, -1.05921, 0.321067, -2.10518, 0.187867, 1.33958, 0.495229] +f_208: 8151.40612 [0.0575671, -1.03871, -1.05919, 0.321102, -2.10514, 0.187881, 1.33959, 0.495247] +f_209: 8151.40612 [0.0575531, -1.03871, -1.05917, 0.321001, -2.10516, 0.187864, 1.33961, 0.495254] +f_210: 8151.40611 [0.05755, -1.03871, -1.05915, 0.32102, -2.10512, 0.187877, 1.3396, 0.495285] +f_211: 8151.40611 [0.0575514, -1.03871, -1.05915, 0.320987, -2.10507, 0.187872, 1.33959, 0.495281] +f_212: 8151.40611 [0.0575494, -1.03872, -1.05913, 0.320997, -2.10502, 0.187882, 1.33959, 0.495288] +f_213: 8151.4061 [0.0575524, -1.03873, -1.05914, 0.320982, -2.10491, 0.187861, 1.33957, 0.495278] +f_214: 8151.40609 [0.0575481, -1.03873, -1.05912, 0.320984, -2.10482, 0.187888, 1.33955, 0.495308] +f_215: 8151.40609 [0.0575444, -1.03875, -1.05917, 0.320865, -2.10456, 0.187902, 1.33959, 0.495298] +f_216: 8151.40608 [0.0575342, -1.03876, -1.05903, 0.321038, -2.1045, 0.187935, 1.3395, 0.495251] +f_217: 8151.40607 [0.0575464, -1.03878, -1.0591, 0.320965, -2.10446, 0.187953, 1.33947, 0.49525] +f_218: 8151.40607 [0.057538, -1.03878, -1.05906, 0.320971, -2.10413, 0.188014, 1.33949, 0.495242] +f_219: 8151.40608 [0.0575572, -1.03878, -1.05915, 0.321034, -2.10442, 0.187859, 1.33944, 0.49528] +f_220: 8151.40606 [0.0575534, -1.03878, -1.05901, 0.320914, -2.10447, 0.187898, 1.3395, 0.495254] +f_221: 8151.40637 [0.0574342, -1.03878, -1.05902, 0.320917, -2.10445, 0.187884, 1.33952, 0.49525] +f_222: 8151.40606 [0.0575421, -1.03878, -1.05902, 0.320961, -2.10446, 0.187888, 1.33954, 0.495237] +f_223: 8151.40606 [0.0575395, -1.03878, -1.059, 0.320957, -2.10443, 0.187846, 1.33951, 0.495217] +f_224: 8151.40606 [0.0575504, -1.03878, -1.05901, 0.320959, -2.1044, 0.187894, 1.33954, 0.495231] +f_225: 8151.40606 [0.0575399, -1.03878, -1.05899, 0.320993, -2.10446, 0.187881, 1.33953, 0.495273] +f_226: 8151.40606 [0.0575432, -1.03879, -1.05899, 0.320941, -2.10444, 0.187836, 1.33953, 0.495276] +f_227: 8151.40606 [0.0575209, -1.03879, -1.05901, 0.320971, -2.10446, 0.187914, 1.3395, 0.49527] +f_228: 8151.40605 [0.0575428, -1.03879, -1.05898, 0.320981, -2.10448, 0.187898, 1.3395, 0.495252] +f_229: 8151.40605 [0.0575411, -1.03879, -1.05901, 0.320991, -2.10447, 0.18792, 1.33948, 0.495258] +f_230: 8151.40605 [0.0575418, -1.03879, -1.05897, 0.320949, -2.10444, 0.187947, 1.33948, 0.495275] +f_231: 8151.40605 [0.0575404, -1.0388, 
-1.05896, 0.320901, -2.10441, 0.187952, 1.33947, 0.495286] +f_232: 8151.40604 [0.0575332, -1.0388, -1.05893, 0.320885, -2.10431, 0.187986, 1.33946, 0.495314] +f_233: 8151.40603 [0.0575351, -1.03882, -1.05884, 0.320773, -2.10412, 0.187986, 1.33944, 0.495319] +f_234: 8151.40602 [0.0575322, -1.03883, -1.05878, 0.320796, -2.10403, 0.18815, 1.33941, 0.495287] +f_235: 8151.406 [0.0575229, -1.03886, -1.05854, 0.320718, -2.10375, 0.188408, 1.33934, 0.495254] +f_236: 8151.40599 [0.0575106, -1.03893, -1.05828, 0.320637, -2.10305, 0.188364, 1.33925, 0.49505] +f_237: 8151.40601 [0.0575272, -1.03899, -1.05838, 0.320265, -2.10267, 0.187835, 1.33917, 0.495406] +f_238: 8151.40602 [0.0575058, -1.03892, -1.05841, 0.320669, -2.10271, 0.188455, 1.33927, 0.49513] +f_239: 8151.40598 [0.0575103, -1.03895, -1.05827, 0.320331, -2.10313, 0.188673, 1.33924, 0.495281] +f_240: 8151.40599 [0.0574946, -1.03894, -1.05825, 0.320176, -2.10308, 0.189172, 1.33925, 0.495418] +f_241: 8151.40633 [0.0575992, -1.03895, -1.05831, 0.320566, -2.10321, 0.188712, 1.33912, 0.495317] +f_242: 8151.40597 [0.0575054, -1.03894, -1.05818, 0.320207, -2.10307, 0.188631, 1.3394, 0.495281] +f_243: 8151.40599 [0.0575039, -1.03894, -1.05814, 0.319971, -2.10289, 0.188645, 1.33931, 0.495254] +f_244: 8151.4072 [0.0577, -1.03895, -1.05823, 0.320198, -2.10306, 0.188654, 1.33943, 0.495248] +f_245: 8151.40596 [0.0574878, -1.03894, -1.05811, 0.320248, -2.10305, 0.188602, 1.33935, 0.495333] +f_246: 8151.40598 [0.0574546, -1.03895, -1.05808, 0.320279, -2.10303, 0.18863, 1.33935, 0.495314] +f_247: 8151.40596 [0.0574846, -1.03894, -1.05814, 0.320218, -2.10303, 0.188615, 1.33934, 0.495297] +f_248: 8151.40596 [0.0574865, -1.03895, -1.05808, 0.32023, -2.10301, 0.188621, 1.33935, 0.495336] +f_249: 8151.40595 [0.0574828, -1.03895, -1.05811, 0.320222, -2.10305, 0.188656, 1.33937, 0.49535] +f_250: 8151.40595 [0.0574845, -1.03895, -1.05814, 0.320218, -2.10303, 0.188652, 1.3394, 0.495359] +f_251: 8151.40601 [0.0574381, -1.03895, -1.05812, 0.320257, -2.10307, 0.18864, 1.33936, 0.495342] +f_252: 8151.40595 [0.0574866, -1.03895, -1.05811, 0.320186, -2.10311, 0.18865, 1.33939, 0.495342] +f_253: 8151.40594 [0.0574894, -1.03896, -1.0581, 0.320136, -2.10322, 0.188657, 1.3394, 0.495333] +f_254: 8151.40592 [0.0574977, -1.03898, -1.05814, 0.32014, -2.10338, 0.188707, 1.33936, 0.495306] +f_255: 8151.4059 [0.0575052, -1.03899, -1.05824, 0.320086, -2.1036, 0.188669, 1.33943, 0.495229] +f_256: 8151.4059 [0.0575097, -1.039, -1.05833, 0.320207, -2.10371, 0.188749, 1.33931, 0.495244] +f_257: 8151.4059 [0.0575182, -1.03901, -1.05835, 0.320232, -2.10378, 0.188745, 1.33939, 0.495097] +f_258: 8151.40589 [0.0575274, -1.03901, -1.05837, 0.32026, -2.10408, 0.188678, 1.33937, 0.495155] +f_259: 8151.40589 [0.0575478, -1.03902, -1.05854, 0.320675, -2.10448, 0.188468, 1.33932, 0.495085] +f_260: 8151.40587 [0.0575278, -1.03903, -1.05835, 0.320214, -2.10412, 0.188705, 1.33937, 0.495146] +f_261: 8151.40588 [0.0575311, -1.03903, -1.05831, 0.319953, -2.10423, 0.188831, 1.33937, 0.495102] +f_262: 8151.40627 [0.0573776, -1.03903, -1.05831, 0.320246, -2.10416, 0.188728, 1.33942, 0.495194] +f_263: 8151.40585 [0.0575057, -1.03903, -1.05831, 0.320285, -2.10415, 0.188779, 1.33942, 0.495185] +f_264: 8151.40585 [0.0574974, -1.03903, -1.05821, 0.320299, -2.1042, 0.188733, 1.3394, 0.495181] +f_265: 8151.40588 [0.0575388, -1.03903, -1.0582, 0.320292, -2.10422, 0.188703, 1.33943, 0.495173] +f_266: 8151.40584 [0.0575023, -1.03903, -1.05821, 0.320315, -2.10425, 0.188744, 1.33939, 0.495206] +f_267: 8151.40584 
[0.057501, -1.03903, -1.05821, 0.32033, -2.10429, 0.188749, 1.33938, 0.495182] +f_268: 8151.40584 [0.0575041, -1.03904, -1.05821, 0.32032, -2.10434, 0.188747, 1.33938, 0.4952] +f_269: 8151.40583 [0.0575036, -1.03904, -1.05822, 0.32037, -2.10441, 0.188756, 1.33938, 0.4952] +f_270: 8151.40583 [0.0575069, -1.03904, -1.05822, 0.320436, -2.10453, 0.188761, 1.33935, 0.495179] +f_271: 8151.40582 [0.0575065, -1.03905, -1.0582, 0.320482, -2.10457, 0.188738, 1.33939, 0.495186] +f_272: 8151.40581 [0.0575116, -1.03907, -1.05813, 0.320482, -2.10473, 0.188675, 1.33941, 0.495208] +f_273: 8151.4058 [0.0575236, -1.03911, -1.05823, 0.320371, -2.10511, 0.188573, 1.33943, 0.495206] +f_274: 8151.40578 [0.057523, -1.03914, -1.05821, 0.320017, -2.10502, 0.188665, 1.33942, 0.495122] +f_275: 8151.4058 [0.0575306, -1.03915, -1.05845, 0.319503, -2.10523, 0.188706, 1.33945, 0.49523] +f_276: 8151.4066 [0.0576863, -1.03914, -1.05828, 0.320169, -2.10503, 0.188855, 1.33945, 0.495206] +f_277: 8151.40579 [0.0575193, -1.03914, -1.0582, 0.320056, -2.10513, 0.188746, 1.33954, 0.494988] +f_278: 8151.40693 [0.0577267, -1.03914, -1.05825, 0.320045, -2.10498, 0.188619, 1.33941, 0.495085] +f_279: 8151.40577 [0.0575198, -1.03915, -1.05813, 0.320047, -2.10506, 0.188693, 1.33948, 0.495132] +f_280: 8151.40578 [0.0575128, -1.03915, -1.05806, 0.320063, -2.10509, 0.188702, 1.33943, 0.495083] +f_281: 8151.40589 [0.0574527, -1.03915, -1.05815, 0.320094, -2.10505, 0.188685, 1.33948, 0.495129] +f_282: 8151.40577 [0.057517, -1.03915, -1.05813, 0.32009, -2.10503, 0.18867, 1.33949, 0.495122] +f_283: 8151.40577 [0.0575204, -1.03915, -1.05815, 0.320125, -2.10508, 0.188655, 1.33948, 0.495122] +f_284: 8151.40576 [0.0575181, -1.03916, -1.05816, 0.320174, -2.10506, 0.188667, 1.33948, 0.495112] +f_285: 8151.40576 [0.0575182, -1.03917, -1.05819, 0.320226, -2.10505, 0.188701, 1.33947, 0.495072] +f_286: 8151.40575 [0.0575201, -1.03918, -1.05825, 0.320257, -2.10517, 0.188676, 1.33947, 0.494996] +f_287: 8151.40574 [0.0575252, -1.03919, -1.05827, 0.320524, -2.1053, 0.188566, 1.33945, 0.495035] +f_288: 8151.40574 [0.0575337, -1.03919, -1.05828, 0.32066, -2.10536, 0.188353, 1.33946, 0.495116] +f_289: 8151.40575 [0.0575332, -1.03919, -1.05827, 0.320956, -2.10552, 0.188291, 1.33945, 0.495112] +f_290: 8151.40579 [0.0574983, -1.03919, -1.05826, 0.320601, -2.10549, 0.188344, 1.3395, 0.49513] +f_291: 8151.40574 [0.0575304, -1.0392, -1.05832, 0.320711, -2.10524, 0.188378, 1.33943, 0.495123] +f_292: 8151.40573 [0.0575268, -1.03921, -1.05839, 0.320804, -2.10507, 0.188255, 1.3394, 0.495093] +f_293: 8151.40574 [0.0575354, -1.03922, -1.05852, 0.320843, -2.10524, 0.188106, 1.33938, 0.495141] +f_294: 8151.40609 [0.0576498, -1.03921, -1.05844, 0.320833, -2.10501, 0.188225, 1.33938, 0.495124] +f_295: 8151.40573 [0.0575302, -1.03921, -1.05843, 0.320872, -2.10498, 0.18825, 1.33941, 0.495151] +f_296: 8151.40573 [0.0575297, -1.03921, -1.05843, 0.320878, -2.105, 0.188243, 1.33941, 0.495152] +f_297: 8151.40573 [0.0575347, -1.03921, -1.05842, 0.320887, -2.10499, 0.188247, 1.33937, 0.49516] +f_298: 8151.40572 [0.0575296, -1.03922, -1.05843, 0.320944, -2.10499, 0.188221, 1.33941, 0.495149] +f_299: 8151.40572 [0.0575336, -1.03922, -1.05846, 0.320925, -2.10497, 0.188245, 1.3394, 0.49513] +f_300: 8151.40572 [0.0575304, -1.03922, -1.05845, 0.320885, -2.10495, 0.188223, 1.3394, 0.495129] +f_301: 8151.40571 [0.0575331, -1.03923, -1.05845, 0.320876, -2.10496, 0.188263, 1.33939, 0.495131] +f_302: 8151.40571 [0.0575195, -1.03924, -1.05844, 0.320845, -2.10498, 0.188331, 1.33939, 0.495117] 
+f_303: 8151.4057 [0.0575306, -1.03925, -1.05842, 0.320803, -2.10499, 0.18839, 1.3394, 0.495118] +f_304: 8151.40569 [0.0575254, -1.03926, -1.05839, 0.320634, -2.105, 0.188493, 1.33939, 0.495072] +f_305: 8151.40567 [0.0575245, -1.03927, -1.05822, 0.320582, -2.10511, 0.188621, 1.33944, 0.495109] +f_306: 8151.40567 [0.057518, -1.03927, -1.0581, 0.320423, -2.10502, 0.188807, 1.3395, 0.495126] +f_307: 8151.40568 [0.057515, -1.03927, -1.05791, 0.320269, -2.10512, 0.188855, 1.33954, 0.495098] +f_308: 8151.40566 [0.0575164, -1.03928, -1.05809, 0.320365, -2.10496, 0.188864, 1.3395, 0.495128] +f_309: 8151.40566 [0.0575132, -1.03929, -1.05813, 0.320338, -2.10504, 0.188922, 1.33945, 0.49513] +f_310: 8151.40565 [0.0575161, -1.03929, -1.05817, 0.320301, -2.105, 0.188882, 1.3395, 0.495135] +f_311: 8151.40565 [0.0575146, -1.0393, -1.05819, 0.32028, -2.10496, 0.188887, 1.33943, 0.495136] +f_312: 8151.40564 [0.05751, -1.03931, -1.05821, 0.320361, -2.10494, 0.188941, 1.33948, 0.495102] +f_313: 8151.40563 [0.0575109, -1.03932, -1.05821, 0.320401, -2.10493, 0.188981, 1.33944, 0.495101] +f_314: 8151.40563 [0.0575134, -1.03933, -1.05819, 0.320427, -2.10487, 0.189017, 1.33948, 0.494982] +f_315: 8151.40561 [0.057504, -1.03935, -1.05827, 0.320449, -2.10476, 0.189118, 1.33944, 0.495026] +f_316: 8151.40558 [0.0575108, -1.03939, -1.05837, 0.320442, -2.10476, 0.189262, 1.33939, 0.495086] +f_317: 8151.40553 [0.057519, -1.03945, -1.05901, 0.320295, -2.10521, 0.189393, 1.33946, 0.495148] +f_318: 8151.40551 [0.0575408, -1.03951, -1.05951, 0.320734, -2.10565, 0.189097, 1.33955, 0.495383] +f_319: 8151.40555 [0.0575464, -1.03954, -1.0603, 0.320019, -2.10554, 0.189165, 1.33955, 0.495287] +f_320: 8151.41105 [0.0579784, -1.03951, -1.05955, 0.320916, -2.1057, 0.189125, 1.33964, 0.49511] +f_321: 8151.40554 [0.0575446, -1.03952, -1.05936, 0.320444, -2.10567, 0.189242, 1.33938, 0.495652] +f_322: 8151.40835 [0.0572029, -1.03952, -1.05952, 0.3208, -2.10577, 0.18924, 1.33952, 0.495332] +f_323: 8151.4055 [0.0575315, -1.03952, -1.05946, 0.320746, -2.10577, 0.189303, 1.33946, 0.495386] +f_324: 8151.40549 [0.0575219, -1.03953, -1.05934, 0.320651, -2.10572, 0.18927, 1.33957, 0.495359] +f_325: 8151.40548 [0.0575198, -1.03954, -1.05929, 0.320672, -2.10556, 0.189287, 1.33943, 0.495367] +f_326: 8151.40546 [0.0575086, -1.03955, -1.05919, 0.320655, -2.10547, 0.189446, 1.33949, 0.495412] +f_327: 8151.40545 [0.0575184, -1.03956, -1.05903, 0.320504, -2.10559, 0.18942, 1.33946, 0.495375] +f_328: 8151.40544 [0.0575052, -1.03958, -1.05895, 0.320399, -2.10552, 0.189563, 1.33951, 0.49545] +f_329: 8151.40543 [0.0574966, -1.03958, -1.05881, 0.32063, -2.10553, 0.189639, 1.33946, 0.495392] +f_330: 8151.40542 [0.0574951, -1.03959, -1.05874, 0.320652, -2.10531, 0.189554, 1.33944, 0.495302] +f_331: 8151.40541 [0.0574858, -1.0396, -1.05862, 0.320799, -2.10524, 0.189653, 1.33939, 0.495303] +f_332: 8151.4054 [0.0574818, -1.03962, -1.05845, 0.320885, -2.10528, 0.189736, 1.33942, 0.49524] +f_333: 8151.40538 [0.05747, -1.03965, -1.0581, 0.320922, -2.10517, 0.189812, 1.33937, 0.495272] +f_334: 8151.40536 [0.0574516, -1.03971, -1.05749, 0.321331, -2.10524, 0.19004, 1.33936, 0.495184] +f_335: 8151.40529 [0.0574442, -1.03977, -1.05769, 0.320579, -2.1049, 0.190371, 1.33931, 0.495173] +f_336: 8151.40526 [0.0574303, -1.0399, -1.05833, 0.319197, -2.10477, 0.191228, 1.33927, 0.494999] +f_337: 8151.4051 [0.0574263, -1.04005, -1.0586, 0.32051, -2.10534, 0.191416, 1.33925, 0.495066] +f_338: 8151.40499 [0.0573845, -1.04037, -1.05848, 0.318815, -2.10505, 0.192642, 1.33908, 
0.495279]
+  ⋮  (verbose BOBYQA trace, evaluations f_339 through f_823, elided)
+f_824: 8151.39995 [0.0574533, -1.05231, -1.0592, 0.321, -2.10545, 0.197008, 1.33973, 0.4952]
+  6.818758 seconds (1.48 M allocations: 94.632 MiB, 0.53% gc time)
 Generalized Linear Mixed Model fit by minimizing the Laplace approximation to the deviance
  Formula: r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)
  Distribution: Distributions.Bernoulli{Float64}
  Link: GLM.LogitLink()
-  Deviance (Laplace approximation): 8151.3997
+  Deviance (Laplace approximation): 8151.4000
 Variance components:
               Column    Variance   Std.Dev.
- id       (Intercept)  1.79482666 1.33971141
- item     (Intercept)  0.24533278 0.49531079
+ id       (Intercept)  1.79487217 1.33972840
+ item     (Intercept)  0.24522279 0.49519975
  Number of obs: 7584; levels of grouping factors: 316, 24
 Fixed-effects parameters:
               Estimate Std.Error  z value P(>|z|)
-(Intercept)    0.199089   0.40518 0.491359  0.6232
-a             0.0574294 0.0167573  3.42712  0.0006
-g: M           0.320678  0.191259  1.67667  0.0936
-b: scold       -1.05879  0.256808 -4.12288   <1e-4
-b: shout        -2.1054  0.258532 -8.14369  <1e-15
-s: self         -1.0555  0.210305 -5.01891   <1e-6
+(Intercept)    0.197008  0.405161 0.486248  0.6268
+a             0.0574533 0.0167575  3.42852  0.0006
+g: M              0.321  0.191261  1.67834  0.0933
+b: scold        -1.0592  0.256755 -4.12535   <1e-4
+b: shout       -2.10545  0.258478 -8.14555  <1e-15
+s: self        -1.05231  0.210261 -5.00476   <1e-6
 ````
@@ -1251,8 +1708,8 @@ The comparison of the slow and fast fit is available in the optimization summary
 ````julia
 julia> mdl1.LMM.optsum
-Initial parameter vector: [0.208273, 0.0543791, 0.304089, -1.0165, -2.0218, -1.01344, 1.33956, 0.496833]
-Initial objective value: 8151.583340132033
+Initial parameter vector: [0.0543791, -1.01344, -1.0165, 0.304089, -2.0218, 0.208273, 1.33956, 0.496833]
+Initial objective value: 8151.583340132032
 Optimizer (from NLopt): LN_BOBYQA
 Lower bounds: [-Inf, -Inf, -Inf, -Inf, -Inf, -Inf, 0.0, 0.0]
@@ -1263,9 +1720,9 @@ xtol_abs: [1.0e-10, 1.0e-10]
 initial_step: [0.135142, 0.00558444, 0.0637411, 0.0858438, 0.0864116, 0.0702961, 0.05, 0.05]
 maxfeval: -1
-Function evaluations: 197
-Final parameter vector: [0.199089, 0.0574294, 0.320678, -1.05879, -2.1054, -1.0555, 1.33971, 0.495311]
-Final objective value: 8151.399719814065
+Function evaluations: 824
+Final parameter vector: [0.0574533, -1.05231, -1.0592, 0.321, -2.10545, 0.197008, 1.33973, 0.4952]
+Final objective value: 8151.399954176272
 Return code: FTOL_REACHED

diff --git a/src/MixedModels.jl b/src/MixedModels.jl
index 1575e4e62..192a28bfd 100644
--- a/src/MixedModels.jl
+++ b/src/MixedModels.jl
@@ -2,17 +2,17 @@ __precompile__()
 
 module MixedModels
 
-using ArgCheck, BlockArrays, CategoricalArrays, DataFrames, Distributions
+using ArgCheck, BlockArrays, CategoricalArrays, Compat, DataFrames, Distributions
 using GLM, NLopt, Showoff, StaticArrays, StatsBase, StatsModels
 using StatsFuns: log2π
 using NamedArrays: NamedArray, setnames!
-using Base.LinAlg: BlasFloat, BlasReal, HermOrSym, PosDefException, checksquare, copytri!
+using Compat.LinearAlgebra: BlasFloat, BlasReal, HermOrSym, PosDefException, checksquare, copytri!
 
-import Base: cor, cond, convert, eltype, full, logdet, std
-import Base.LinAlg: A_mul_B!, A_mul_Bc!, Ac_mul_B!, A_ldiv_B!, Ac_ldiv_B!, A_rdiv_B!, A_rdiv_Bc!
+import Base: cor, convert, eltype, full, logdet, std
+import Compat.LinearAlgebra: A_mul_B!, A_mul_Bc!, Ac_mul_B!, A_ldiv_B!, Ac_ldiv_B!, A_rdiv_B!, A_rdiv_Bc!, cond
 import NLopt: Opt
-import StatsBase: coef, coeftable, dof, deviance, fit!, fitted, loglikelihood,
-    model_response, nobs, predict, vcov
+import StatsBase: coef, coeftable, dof, deviance, fit, fit!, fitted, loglikelihood,
+    model_response, nobs, predict, stderr, vcov
 
 export
     @formula,
@@ -47,6 +47,7 @@ export
     condVar,
     deviance,
     dof,
+    fit,
     fit!,
     fitlmm,
     fitted,
@@ -91,5 +92,6 @@ include("pls.jl")
 include("simulate.jl")
 include("PIRLS.jl")
 include("mixedmodel.jl")
+include("deprecates.jl")
 
 end # module
diff --git a/src/PIRLS.jl b/src/PIRLS.jl
index 43023e1b5..ea7daa170 100644
--- a/src/PIRLS.jl
+++ b/src/PIRLS.jl
@@ -13,19 +13,12 @@ function fixef(m::GeneralizedLinearMixedModel{T}, permuted=true) where T
         m.β[iperm] : copy!(fill(-zero(T), p), m.β)[iperm]
 end
 
-"""
-    glmm(f::Formula, fr::ModelFrame, d::Distribution[, l::GLM.Link])
-
-Return a `GeneralizedLinearMixedModel` object.
-
-The value is ready to be `fit!` but has not yet been fit.
-"""
-function glmm(f::Formula, fr::AbstractDataFrame, d::Distribution, l::Link; wt=[], offset=[],
-    contrasts = Dict())
+function GeneralizedLinearMixedModel(f::Formula, fr::AbstractDataFrame,
+        d::Distribution, l::Link; wt=[], offset=[], contrasts = Dict())
     if d == Binomial() && isempty(wt)
         d = Bernoulli()
     end
-    LMM = lmm(f, fr; weights = wt, contrasts=contrasts, rdist=d)
+    LMM = LinearMixedModel(f, fr; weights = wt, contrasts=contrasts, rdist=d)
     X = LMM.trms[end - 1].x
     T = eltype(X)
     y = copy(model_response(LMM))
@@ -44,8 +37,14 @@ function glmm(f::Formula, fr::AbstractDataFrame, d::Distribution, l::Link; wt=[]
     res
 end
 
-glmm(f::Formula, fr::AbstractDataFrame, d::Distribution; wt=[], offset=[], contrasts=Dict()) =
-    glmm(f, fr, d, GLM.canonicallink(d), wt=wt, offset=offset, contrasts=contrasts)
+GeneralizedLinearMixedModel(f::Formula, fr::AbstractDataFrame, d::Distribution; wt=[], offset=[], contrasts=Dict()) =
+    GeneralizedLinearMixedModel(f, fr, d, GLM.canonicallink(d), wt=wt, offset=offset, contrasts=contrasts)
+
+fit(::Type{GeneralizedLinearMixedModel}, f::Formula, fr::AbstractDataFrame, d::Distribution) =
+    fit!(GeneralizedLinearMixedModel(f, fr, d, GLM.canonicallink(d)))
+
+fit(::Type{GeneralizedLinearMixedModel}, f::Formula, fr::AbstractDataFrame, d::Distribution, l::Link) =
+    fit!(GeneralizedLinearMixedModel(f, fr, d, l))
 
 """
     LaplaceDeviance(m::GeneralizedLinearMixedModel{T})::T where T
@@ -217,7 +216,7 @@ function StatsBase.fit!(m::GeneralizedLinearMixedModel{T};
         optsum.lowerbd = vcat(fill!(similar(β), T(-Inf)), optsum.lowerbd)
         optsum.initial = vcat(β, m.θ)
         optsum.final = copy(optsum.initial)
-        optsum.initial_step = vcat(stderr(m) ./ 3, min.(T(0.05), m.θ ./ 4))
+        optsum.initial_step = vcat(StatsBase.stderr(m) ./ 3, min.(T(0.05), m.θ ./ 4))
     end
     setpar! = fast ? setθ! : setβθ!
     feval = 0
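A minimal usage sketch of the renamed constructor and the two `fit` methods added above, assuming the `verbagg` data frame has been loaded as in `test/pirls.jl`:

```julia
using DataFrames, Distributions, MixedModels

# construct the unfitted model, then optimize it in place
gm = fit!(GeneralizedLinearMixedModel(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)),
                                      verbagg, Bernoulli()))

# or construct and fit in one call; the canonical (logit) link is used by default,
# and an explicit Link can still be supplied as a fifth positional argument
gm = fit(GeneralizedLinearMixedModel, @formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)),
         verbagg, Bernoulli())
```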
diff --git a/src/deprecates.jl b/src/deprecates.jl
new file mode 100644
index 000000000..5d1ee3a62
--- /dev/null
+++ b/src/deprecates.jl
@@ -0,0 +1,3 @@
+@deprecate lmm(f::Formula, fr::AbstractDataFrame) LinearMixedModel(f, fr)
+@deprecate glmm(f::Formula, fr::AbstractDataFrame, d::Distribution) GeneralizedLinearMixedModel(f, fr, d)
+@deprecate glmm(f::Formula, fr::AbstractDataFrame, d::Distribution, l::Link) GeneralizedLinearMixedModel(f, fr, d, l)
diff --git a/src/mixedmodel.jl b/src/mixedmodel.jl
index 03d68edd6..4e72c711c 100644
--- a/src/mixedmodel.jl
+++ b/src/mixedmodel.jl
@@ -34,7 +34,7 @@ fixefnames(m::MixedModel) = lmm(m).trms[end - 1].cnames
 
 function StatsBase.coeftable(m::MixedModel)
     co = coef(m)
-    se = stderr(m)
+    se = StatsBase.stderr(m)
     z = co ./ se
     pvalue = ccdf.(Chisq(1), abs2.(z))
     CoefTable(hcat(co, se, z, pvalue), ["Estimate", "Std.Error", "z value", "P(>|z|)"],
diff --git a/src/modelterms.jl b/src/modelterms.jl
index 4a98cbd7d..5c9dd7115 100644
--- a/src/modelterms.jl
+++ b/src/modelterms.jl
@@ -258,7 +258,7 @@ rowlengths(A::MatrixTerm{T}) where {T} = T[]
 
 vsize(A::VectorFactorReTerm{T,V,R,S}) where {T,V,R,S} = S
 
-Base.eltype(::AbstractFactorReTerm{T}) where {T} = T
+eltype(::AbstractFactorReTerm{T}) where {T} = T
 
 Base.full(A::AbstractFactorReTerm) = full(sparse(A))
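The `@deprecate` lines in `src/deprecates.jl` above keep the old names working during the transition: calling them emits a deprecation warning and forwards to the renamed constructors. A short sketch of the mapping, assuming the `dat` dictionary of test data sets used in `test/pls.jl`:

```julia
using DataFrames, Distributions, MixedModels

# deprecated spellings: warn, then forward to the new constructors
fm = lmm(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff])
gm = glmm(@formula(use ~ 1 + a + (1 | urbdist)), dat[:Contraception], Bernoulli())

# replacement spellings introduced by this patch
fm = LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff])
gm = GeneralizedLinearMixedModel(@formula(use ~ 1 + a + (1 | urbdist)), dat[:Contraception], Bernoulli())
```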
diff --git a/src/pls.jl b/src/pls.jl
index efcede16a..f0ecf8201 100644
--- a/src/pls.jl
+++ b/src/pls.jl
@@ -58,28 +58,8 @@ function LinearMixedModel(f, trms, wts)
     LinearMixedModel(f, trms, sqrtwts, A, LowerTriangular(L), optsum)
 end
 
-model_response(mf::ModelFrame, d::Distribution=Normal()) =
-    model_response(mf.df[mf.terms.eterms[1]], d)
-
-model_response(v::AbstractVector, d::Distribution) = Vector{partype(d)}(v)
-
-function model_response(v::CategoricalVector, d::Bernoulli)
-    levs = levels(v)
-    nlevs = length(levs)
-    @argcheck(nlevs ≤ 2)
-    nlevs < 2 ? zeros(v, partype(d)) : partype(d)[cv == levs[2] for cv in v]
-end
-
-"""
-    lmm(f::DataFrames.Formula, fr::DataFrames.DataFrame; weights = [], contrasts = Dict())
-
-Create a `LinearMixedModel` from `f`, a formula that contains both fixed-effects terms
-and random effects, and `fr`.
-
-The return value is ready to be `fit!` but has not yet been fit.
-"""
-function lmm(f::Formula, fr::AbstractDataFrame;
-    weights::Vector = [], contrasts=Dict(), rdist::Distribution=Normal())
+function LinearMixedModel(f::Formula, fr::AbstractDataFrame;
+        weights::Vector = [], contrasts=Dict(), rdist::Distribution=Normal())
     mf = ModelFrame(f, fr, contrasts=contrasts)
     X = ModelMatrix(mf).m
     n = size(X, 1)
@@ -117,7 +97,7 @@ function lmm(f::Formula, fr::AbstractDataFrame;
             push!(trms, length(coefnms) == 1 ?
                 ScalarFactorReTerm(gr, m, m, grp, coefnms, one(T)) :
                 VectorFactorReTerm(gr, transpose(reshape(m, (n, sum(trsize)))), grp,
-                    coefnms, trsize))
+                                   coefnms, trsize))
         end
     end
     sort!(trms, by = nrandomeff, rev = true)
@@ -126,6 +106,18 @@ function lmm(f::Formula, fr::AbstractDataFrame;
     LinearMixedModel(f, trms, oftype(y, weights))
 end
 
+model_response(mf::ModelFrame, d::Distribution) = model_response(mf.df[mf.terms.eterms[1]], d)
+
+model_response(v::AbstractVector, d::Distribution) = Vector{partype(d)}(v)
+
+function model_response(v::CategoricalVector, d::Bernoulli)
+    levs = levels(v)
+    nlevs = length(levs)
+    @argcheck(nlevs ≤ 2)
+    nlevs < 2 ? zeros(v, partype(d)) : partype(d)[cv == levs[2] for cv in v]
+end
+
+fit(::Type{LinearMixedModel}, f::Formula, fr::AbstractDataFrame) = fit!(LinearMixedModel(f, fr))
 
 """
     updateL!(m::LinearMixedModel)
diff --git a/test/pirls.jl b/test/pirls.jl
index 3c2732a6c..b8da12a3c 100644
--- a/test/pirls.jl
+++ b/test/pirls.jl
@@ -1,5 +1,4 @@
-using Compat, DataFrames, RData, MixedModels
-using Compat.Test
+using Compat, Compat.Test, DataFrames, RData, MixedModels
 
 if !isdefined(:dat) || !isa(dat, Dict{Symbol, Any})
     dat = convert(Dict{Symbol,Any}, load(joinpath(dirname(@__FILE__), "dat.rda")))
@@ -9,12 +8,12 @@ end
     contraception = dat[:Contraception]
     contraception[:a2] = abs2.(contraception[:a])
     contraception[:urbdist] = string.(contraception[:urb], contraception[:d])
-    gm0 = fit!(glmm(@formula(use ~ 1 + a + a2 + urb + l + (1 | urbdist)), contraception,
+    gm0 = fit!(GeneralizedLinearMixedModel(@formula(use ~ 1 + a + a2 + urb + l + (1 | urbdist)), contraception,
        Bernoulli()), fast = true);
     @test isapprox(getθ(gm0)[1], 0.5720734451352923, atol=0.001)
     @test isapprox(LaplaceDeviance(gm0), 2361.657188518064, atol=0.001)
-    gm1 = fit!(glmm(@formula(use ~ 1 + a + a2 + urb + l + (1 | urbdist)), contraception,
-        Bernoulli()));
+    gm1 = fit(GeneralizedLinearMixedModel, @formula(use ~ 1 + a + a2 + urb + l + (1 | urbdist)), contraception,
+        Bernoulli());
     @test isapprox(gm1.θ[1], 0.573054, atol=0.005)
     @test lowerbd(gm1) == push!(fill(-Inf, 7), 0.)
     @test isapprox(LaplaceDeviance(gm1), 2361.57129, rtol=0.00001)
@@ -31,8 +30,8 @@ end
 @testset "cbpp" begin
     cbpp = dat[:cbpp]
     cbpp[:prop] = cbpp[:i] ./ cbpp[:s]
-    gm2 = fit!(glmm(@formula(prop ~ 1 + p + (1 | h)), cbpp, Binomial(),
-        wt = Array(cbpp[:s])));
+    gm2 = fit!(GeneralizedLinearMixedModel(@formula(prop ~ 1 + p + (1 | h)), cbpp, Binomial(),
+        wt = cbpp[:s]));
     @test isapprox(LaplaceDeviance(gm2), 100.09585619324639, atol=0.0001)
     @test isapprox(sum(abs2, gm2.u[1]), 9.723175126731014, atol=0.0001)
     @test isapprox(logdet(gm2), 16.90113, atol=0.0001)
@@ -44,8 +43,8 @@ end
 @testset "verbagg" begin
     verbagg = dat[:VerbAgg]
-    gm3 = fit!(glmm(@formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)), verbagg,
-        Bernoulli()));
+    gm3 = fit(GeneralizedLinearMixedModel, @formula(r2 ~ 1 + a + g + b + s + (1 | id) + (1 | item)),
+        verbagg, Bernoulli());
     @test isapprox(LaplaceDeviance(gm3), 8151.39972809092, atol=0.001)
     @test lowerbd(gm3) == vcat(fill(-Inf, 6), zeros(2))
     @test fitted(gm3) == predict(gm3)
@@ -54,10 +53,10 @@ end
     @test isapprox(sum(gm3.resp.devresid), 7156.558983084621, atol=0.1)
 end
 
-#=
+#= Needs a method αβA_mul_Bc!(::Float64, ::SparseMatrixCSC{Float64,Int32}, ::SparseMatrixCSC{Float64,Int32}, ::Float64, ::SparseMatrixCSC{Float64,Int32})
 @testset "grouseticks" begin
-    gm4 = fit!(glmm(@formula(t ~ 1 + y + ch + (1|i) + (1|b) + (1|l)), dat[:grouseticks],
-        Poisson()))
+    gm4 = fit(GeneralizedLinearMixedModel, @formula(t ~ 1 + y + ch + (1|i) + (1|b) + (1|l)),
+        dat[:grouseticks], Poisson())
     @test isapprox(LaplaceDeviance(gm4), 849.5439802900257, atol=0.001)
     @test lowerbd(gm4) == vcat(fill(-Inf, 4), zeros(3))
     # these two values are not well defined at the optimum
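The `fit(::Type{LinearMixedModel}, ...)` method added in `src/pls.jl` above makes the one-shot call equivalent to constructing and then calling `fit!`, which is the pattern the `test/pls.jl` changes below exercise. A brief sketch, again assuming the `dat` test dictionary:

```julia
using DataFrames, MixedModels

# construct an unfitted model, then optimize it in place
fm1 = fit!(LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff]))

# equivalent one-shot form
fm2 = fit(LinearMixedModel, @formula(Y ~ 1 + (1 | G)), dat[:Dyestuff])
```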
diff --git a/test/pls.jl b/test/pls.jl
index 9fdfc456d..acf5659c9 100644
--- a/test/pls.jl
+++ b/test/pls.jl
@@ -6,7 +6,7 @@ if !isdefined(:dat) || !isa(dat, Dict{Symbol, Any})
 end
 
 @testset "Dyestuff" begin
-    fm1 = lmm(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])
+    fm1 = LinearMixedModel(@formula(Y ~ 1 + (1|G)), dat[:Dyestuff])
 
     @test nblocks(fm1.A) == (3, 3)
     @test size(fm1.trms) == (3, )
@@ -54,7 +54,7 @@ end
 
 @testset "Dyestuff2" begin
-    fm = lmm(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff2])
+    fm = LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff2])
     @test lowerbd(fm) == zeros(1)
     fit!(fm, true)
     show(IOBuffer(), fm)
@@ -70,7 +70,7 @@ end
 
 @testset "penicillin" begin
-    fm = lmm(@formula(Y ~ 1 + (1 | G) + (1 | H)), dat[:Penicillin]);
+    fm = LinearMixedModel(@formula(Y ~ 1 + (1 | G) + (1 | H)), dat[:Penicillin]);
     @test size(fm) == (144, 1, 30, 2)
     @test getθ(fm) == ones(2)
     @test lowerbd(fm) == zeros(2)
@@ -93,7 +93,7 @@ end
 
 @testset "pastes" begin
-    fm = lmm(@formula(Y ~ (1 | G) + (1 | H)), dat[:Pastes])
+    fm = LinearMixedModel(@formula(Y ~ (1 | G) + (1 | H)), dat[:Pastes])
     @test size(fm) == (60, 1, 40, 2)
     @test getθ(fm) == ones(2)
     @test lowerbd(fm) == zeros(2)
@@ -113,7 +113,7 @@ end
 
 @testset "InstEval" begin
-    fm1 = lmm(@formula(Y ~ 1 + A + (1 | G) + (1 | H) + (1 | I)), dat[:InstEval])
+    fm1 = LinearMixedModel(@formula(Y ~ 1 + A + (1 | G) + (1 | H) + (1 | I)), dat[:InstEval])
     @test size(fm1) == (73421, 2, 4114, 3)
     @test getθ(fm1) == ones(3)
     @test lowerbd(fm1) == zeros(3)
@@ -129,13 +129,13 @@ end
     @test size(resid1) == (73421, )
     @test isapprox(resid1[1], 1.82124, atol=0.00001)
 
-    fm2 = fit!(lmm(@formula(Y ~ 1 + A*I + (1 | G) + (1 | H)), dat[:InstEval]))
+    fm2 = fit!(LinearMixedModel(@formula(Y ~ 1 + A*I + (1 | G) + (1 | H)), dat[:InstEval]))
     @test isapprox(objective(fm2), 237585.5534151694, atol=0.001)
     @test size(fm2) == (73421, 28, 4100, 2)
 end
 
 @testset "sleep" begin
-    fm = lmm(@formula(Y ~ 1 + U + (1 + U | G)), dat[:sleepstudy]);
+    fm = LinearMixedModel(@formula(Y ~ 1 + U + (1 + U | G)), dat[:sleepstudy]);
     @test lowerbd(fm) == [0.0, -Inf, 0.0]
     A11 = fm.A[Block(1,1)]
     @test isa(A11, UniformBlockDiagonal{Float64})
@@ -179,7 +179,7 @@ end
     simulate!(fm)  # to test one of the unscaledre methods
 
-    fmnc = lmm(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])
+    fmnc = LinearMixedModel(@formula(Y ~ 1 + U + (1|G) + (0+U|G)), dat[:sleepstudy])
     @test size(fmnc) == (180,2,36,1)
     @test getθ(fmnc) == ones(2)
     @test lowerbd(fmnc) == zeros(2)
@@ -199,13 +199,13 @@ end
     MixedModels.lrt(fm, fmnc)
 
-    fmrs = fit!(lmm(@formula(Y ~ 1 + U + (0 + U|G)), dat[:sleepstudy]))
+    fmrs = fit!(LinearMixedModel(@formula(Y ~ 1 + U + (0 + U|G)), dat[:sleepstudy]))
     @test isapprox(objective(fmrs), 1774.080315280528, rtol=0.00001)
     @test isapprox(getθ(fmrs), [0.24353985679033105], rtol=0.00001)
 end
 
 @testset "d3" begin
-    fm = updateL!(lmm(@formula(Y ~ 1 + U + (1+U|G) + (1+U|H) + (1+U|I)), dat[:d3]));
+    fm = updateL!(LinearMixedModel(@formula(Y ~ 1 + U + (1+U|G) + (1+U|H) + (1+U|I)), dat[:d3]));
     @test isapprox(pwrss(fm), 5.1261847180180885e6, rtol = 1e-6)
     @test isapprox(logdet(fm), 52718.0137366602, rtol = 1e-6)
     @test isapprox(objective(fm), 901641.2930413672, rtol = 1e-6)
@@ -216,7 +216,7 @@ end
 
 @testset "simulate!" begin
     @test MixedModels.stddevcor(cholfact!(eye(3))) == (ones(3), eye(3))
-    fm = fit!(lmm(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff]))
+    fm = fit!(LinearMixedModel(@formula(Y ~ 1 + (1 | G)), dat[:Dyestuff]))
     refit!(simulate!(MersenneTwister(1234321), fm))
     @test isapprox(deviance(fm), 339.0218639362958, atol=0.001)
     refit!(fm, dat[:Dyestuff][:Y])