Merged (22 commits)
- `83dfeda` add parameter table sorting (Maximilian-Stefan-Ernst, Jun 24, 2022)
- `e826b68` closes #108 (Maximilian-Stefan-Ernst, Jun 24, 2022)
- `e53605d` Update loss.md (brandmaier, Jun 29, 2022)
- `2d5902a` Merge pull request #135 from StructuralEquationModels/main (Maximilian-Stefan-Ernst, Jul 1, 2022)
- `8ad5e44` adding headers to the read me for better optics/ structure (LeonieHagitte, Dec 25, 2022)
- `38a07e5` forgot to capitalise one letter (LeonieHagitte, Dec 25, 2022)
- `059a428` fix bug in update_observed method for RAMSymbolic (Maximilian-Stefan-Ernst, Jan 29, 2023)
- `ebbeda9` Merge pull request #143 from StructuralEquationModels/main (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `30f8186` Merge pull request #141 from LeonieHagitte/feat-small-changes (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `279bd60` Merge pull request #133 from brandmaier/patch-1 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `ea4af73` Merge pull request #144 from StructuralEquationModels/main (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `626f753` close #132 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `b9e8155` close #108 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `aecb196` close #130 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `3616ad1` close #139 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `4477cbd` add forgotten file (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `33cd9f0` update Test syntax for broken unit tests (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `b6ebe48` close #117 (Maximilian-Stefan-Ernst, Jun 9, 2023)
- `870f89d` typo (Maximilian-Stefan-Ernst, Jun 10, 2023)
- `61a39b0` add warning for WLS standard errors (Maximilian-Stefan-Ernst, Jun 10, 2023)
- `b2c4b64` update package version (Maximilian-Stefan-Ernst, Jun 13, 2023)
- `00b69f7` update julia version for CI and docs (Maximilian-Stefan-Ernst, Jun 13, 2023)
2 changes: 1 addition & 1 deletion .github/workflows/CI.yml
@@ -15,7 +15,7 @@ jobs:
fail-fast: true
matrix:
version:
- '1.7.3'
- '1'
os:
- ubuntu-latest
arch:
5 changes: 3 additions & 2 deletions .github/workflows/CI_ecosystem.yml
@@ -18,7 +18,8 @@ jobs:
fail-fast: false
matrix:
version:
- '1.7.3'
- '1'
- '1.9'
os:
- ubuntu-latest
- macos-latest
@@ -42,4 +43,4 @@ jobs:
- uses: julia-actions/julia-processcoverage@v1
- uses: codecov/codecov-action@v2
with:
file: lcov.info
file: lcov.info
5 changes: 3 additions & 2 deletions .github/workflows/CI_extended.yml
@@ -18,7 +18,8 @@ jobs:
fail-fast: false
matrix:
version:
- '1.7.3'
- '1'
- '1.9'
os:
- ubuntu-latest
- macos-latest
@@ -42,4 +43,4 @@ jobs:
- uses: julia-actions/julia-processcoverage@v1
- uses: codecov/codecov-action@v2
with:
file: lcov.info
file: lcov.info
File renamed without changes.
2 changes: 1 addition & 1 deletion .github/workflows/documentation.yml
@@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@latest
with:
version: '1.7.2'
version: '1'
- name: Install dependencies
run: julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()'
- name: Build and deploy
4 changes: 2 additions & 2 deletions .github/workflows/preview-documentation.yml
@@ -16,7 +16,7 @@ jobs:
- uses: actions/checkout@v2
- uses: julia-actions/setup-julia@latest
with:
version: '1.6'
version: '1'
- name: Install dependencies
run: julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()'
- name: Build and deploy
@@ -36,4 +36,4 @@ jobs:
owner: context.repo.owner,
repo: context.repo.repo,
body: 'Docs can be previewed here: ${{ env.GHP_URL }}previews/PR${{ env.PR_NUMBER }}'
})
})
4 changes: 2 additions & 2 deletions Project.toml
@@ -1,7 +1,7 @@
name = "StructuralEquationModels"
uuid = "383ca8c5-e4ff-4104-b0a9-f7b279deed53"
authors = ["Maximilian Ernst", "Aaron Peikert"]
version = "0.1.0"
version = "0.2.0"

[deps]
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
@@ -24,7 +24,7 @@ StenoGraphs = "78862bba-adae-4a83-bb4d-33c106177f81"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"

[compat]
julia = "1.7"
julia = "1.9"
StenoGraphs = "0.2"
DataFrames = "1"
Distributions = "0.25"
8 changes: 6 additions & 2 deletions README.md
@@ -4,6 +4,7 @@
|:-------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------:|
| [![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://structuralequationmodels.github.io/StructuralEquationModels.jl/) [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://structuralequationmodels.github.io/StructuralEquationModels.jl/dev/) | [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) [![Github Action CI](https://github.com/StructuralEquationModels/StructuralEquationModels.jl/workflows/CI_extended/badge.svg)](https://github.com/StructuralEquationModels/StructuralEquationModels.jl/actions/) [![codecov](https://codecov.io/gh/StructuralEquationModels/StructuralEquationModels.jl/branch/main/graph/badge.svg?token=P2kjzpvM4V)](https://codecov.io/gh/StructuralEquationModels/StructuralEquationModels.jl) | [![DOI](https://zenodo.org/badge/228649704.svg)](https://zenodo.org/badge/latestdoi/228649704) |

# What is this Package for?

This is a package for Structural Equation Modeling.
It is still *in development*.
@@ -14,14 +15,17 @@ Models you can fit include
- Multigroup SEM
- Sums of arbitrary loss functions (everything the optimizer can handle).

# What are the merits?

We provide fast objective functions, gradients, and for some cases hessians as well as approximations thereof.
As a user, you can easily define custom loss functions.
For those, you can decide to provide analytical gradients or use finite difference approximation / automatic differentiation.
You can choose to mix and match loss functions natively found in this package and those you provide.
In such cases, you optimize over a sum of different objectives (e.g. ML + Ridge).
This mix and match strategy also applies to gradients, where you may supply analytic gradients or opt for automatic differentiation or mix analytical and automatic differentiation.

You may consider using this package if:
# You may consider using this package if:

- you want to extend SEM (e.g. add a new objective function) and need an extendable framework
- you want to extend SEM, and your implementation needs to be fast (because you want to do a simulation, for example)
- you want to fit the same model(s) to many datasets (bootstrapping, simulation studies)
@@ -33,7 +37,7 @@ The package makes use of
- Optim.jl and NLopt.jl to provide a range of different Optimizers/Linesearches.
- FiniteDiff.jl and ForwardDiff.jl to provide gradients for user-defined loss functions.

At the moment, we are still working on
# At the moment, we are still working on:
- optimizing performance for big models (with hundreds of parameters)

# Questions?
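As a small, hedged illustration of the "fit the same model(s) to many datasets" use case the README advertises (a sketch only: `spec` and `datasets` are assumed to exist, and the default loss and optimizer are used, mirroring the constructor calls in the test code further down in this PR):

```julia
using StructuralEquationModels

# Fit one model specification to a collection of datasets,
# e.g. bootstrap samples or simulated replications.
# `spec` (a ParameterTable or RAMMatrices) and `datasets` (a vector of
# data tables) are assumed to be defined elsewhere.
fits = map(datasets) do data
    model = Sem(specification = spec, data = data)
    sem_fit(model)
end
```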
4 changes: 2 additions & 2 deletions docs/src/developer/loss.md
@@ -217,7 +217,7 @@ Let's make a slightly more complicated example: we will reimplement maximum likel
To keep it simple, we only cover models without a meanstructure. The maximum likelihood objective is defined as

```math
F_{ML} = \log \det \Sigma_i + \mathrm{tr}(\Sigma_i \Sigma_o)
F_{ML} = \log \det \Sigma_i + \mathrm{tr}\left(\Sigma_{i}^{-1} \Sigma_o \right)
```

where ``\Sigma_i`` is the model implied covariance matrix and ``\Sigma_o`` is the observed covariance matrix. We can query the model implied covariance matrix from the `imply` part of our model, and the observed covariance matrix from the `observed` part of our model.
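A minimal sketch of this objective in plain Julia (assuming dense matrices `Σ_i` and `Σ_o`; an illustration, not the package's internal implementation):

```julia
using LinearAlgebra

# F_ML = logdet(Σᵢ) + tr(Σᵢ⁻¹ Σₒ), for a model without meanstructure
F_ML(Σ_i, Σ_o) = logdet(Σ_i) + tr(Σ_i \ Σ_o)
```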
@@ -269,4 +269,4 @@ model_ml = SemFiniteDiff(
model_fit = sem_fit(model_ml)
```

If you want to differentiate your own loss functions via automatic differentiation, check out the [AutoDiffSEM](https://github.com/StructuralEquationModels/AutoDiffSEM) package (spoiler alert: it's really easy).
If you want to differentiate your own loss functions via automatic differentiation, check out the [AutoDiffSEM](https://github.com/StructuralEquationModels/AutoDiffSEM) package (spoiler alert: it's really easy).
1 change: 1 addition & 0 deletions src/StructuralEquationModels.jl
@@ -90,6 +90,7 @@ export AbstractSem,
SemConstant, SemWLS, loss,
SemOptimizer,
SemOptimizerEmpty, SemOptimizerOptim, SemOptimizerNLopt, NLoptConstraint,
optimizer, n_iterations, convergence,
SemObserved,
SemObservedData, SemObservedCovariance, SemObservedMissing, observed,
sem_fit,
9 changes: 7 additions & 2 deletions src/frontend/fit/standard_errors/hessian.jl
@@ -26,7 +26,7 @@ function se_hessian(sem_fit::SemFit; hessian = :finitediff)
elseif hessian == :expected
throw(ArgumentError("standard errors based on the expected hessian are not implemented yet"))
else
throw(ArgumentError("I dont know how to compute `$how` standard-errors"))
throw(ArgumentError("I dont know how to compute `$hessian` standard-errors"))
end

invH = c*inv(H)
@@ -44,9 +44,14 @@ H_scaling(model::AbstractSemSingle) =
model.optimizer,
model.loss.functions...)

H_scaling(model, obs, imp, optimizer, lossfun::Union{SemML, SemWLS}) =
H_scaling(model, obs, imp, optimizer, lossfun::SemML) =
2/(n_obs(model)-1)

function H_scaling(model, obs, imp, optimizer, lossfun::SemWLS)
@warn "Standard errors for WLS are only correct if a GLS weight matrix (the default) is used."
return 2/(n_obs(model)-1)
end

H_scaling(model, obs, imp, optimizer, lossfun::SemFIML) =
2/n_obs(model)

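For reference, a usage sketch of the function touched here (`model_fit` is assumed to be a `SemFit` returned by `sem_fit`; `:finitediff` is the default shown in the signature above):

```julia
# Hessian-based standard errors; `hessian = :expected` is not implemented
# and throws an ArgumentError.
se = se_hessian(model_fit; hessian = :finitediff)
```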
17 changes: 16 additions & 1 deletion src/frontend/specification/EnsembleParameterTable.jl
@@ -69,7 +69,22 @@ end

# Sorting ----------------------------------------------------------------------------------

# todo
# Sorting ----------------------------------------------------------------------------------

function sort!(ensemble_partable::EnsembleParameterTable)

for partable in values(ensemble_partable.tables)
sort!(partable)
end

return ensemble_partable
end

function sort(partable::EnsembleParameterTable)
new_partable = deepcopy(partable)
sort!(new_partable)
return new_partable
end

# add a row --------------------------------------------------------------------------------

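The new `sort`/`sort!` methods for `EnsembleParameterTable` can be used like this (a usage sketch; `ensemble_partable` is assumed to be an existing `EnsembleParameterTable`):

```julia
sorted_partable = sort(ensemble_partable)   # returns a sorted deep copy
sort!(ensemble_partable)                    # sorts the contained tables in place
```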
2 changes: 1 addition & 1 deletion src/imply/RAM/symbolic.jl
@@ -238,7 +238,7 @@ identifier(imply::RAMSymbolic) = imply.identifier
n_par(imply::RAMSymbolic) = imply.n_par

function update_observed(imply::RAMSymbolic, observed::SemObserved; kwargs...)
if n_man(observed) == size(imply.Σ)
if Int(n_man(observed)) == size(imply.Σ, 1)
return imply
else
return RAMSymbolic(;observed = observed, kwargs...)
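The original comparison matched an integer against the tuple returned by `size`, which is always `false` in Julia, so the early-return branch could never be taken. A minimal illustration of the fixed check:

```julia
Σ = zeros(11, 11)
11 == size(Σ)      # false: an Int never equals a Tuple
11 == size(Σ, 1)   # true: compare against the first dimension instead
```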
5 changes: 5 additions & 0 deletions src/imply/empty.jl
@@ -3,6 +3,7 @@
############################################################################################
"""
Empty placeholder for models that don't need an imply part.
(For example, models that only regularize parameters.)

# Constructor

@@ -11,6 +12,10 @@ Empty placeholder for models that don't need an imply part.
# Arguments
- `specification`: either a `RAMMatrices` or `ParameterTable` object

# Examples
A multigroup model with ridge regularization could be specified as a `SemEnsemble` with one
model per group and an additional model with `ImplyEmpty` and `SemRidge` for the regularization part.

# Extended help

## Interfaces
67 changes: 64 additions & 3 deletions test/examples/multigroup/build_models.jl
@@ -16,10 +16,8 @@ model_g2 = Sem(

model_ml_multigroup = SemEnsemble(model_g1, model_g2; optimizer = semoptimizer)

############################################################################################
### test gradients
############################################################################################

# gradients
@testset "ml_gradients_multigroup" begin
@test test_gradient(model_ml_multigroup, start_test; atol = 1e-9)
end
@@ -49,6 +47,69 @@ end
lav_groups = Dict(:Pasteur => 1, :Grant_White => 2))
end

############################################################################################
# ML estimation - sorted
############################################################################################

partable_s = sort(partable)

specification_s = RAMMatrices(partable_s)

specification_g1_s = specification_s[:Pasteur]
specification_g2_s = specification_s[:Grant_White]

model_g1 = Sem(
specification = specification_g1_s,
data = dat_g1,
imply = RAMSymbolic
)

model_g2 = Sem(
specification = specification_g2_s,
data = dat_g2,
imply = RAM
)

model_ml_multigroup = SemEnsemble(model_g1, model_g2; optimizer = semoptimizer)

# gradients
@testset "ml_gradients_multigroup | sorted" begin
@test test_gradient(model_ml_multigroup, start_test; atol = 1e-2)
end

grad = similar(start_test)
gradient!(grad, model_ml_multigroup, rand(36))
grad_fd = FiniteDiff.finite_difference_gradient(x -> objective!(model_ml_multigroup, x), start_test)

# fit
@testset "ml_solution_multigroup | sorted" begin
solution = sem_fit(model_ml_multigroup)
update_estimate!(partable_s, solution)
@test compare_estimates(
partable,
solution_lav[:parameter_estimates_ml]; atol = 1e-4,
lav_groups = Dict(:Pasteur => 1, :Grant_White => 2))
end

@testset "fitmeasures/se_ml | sorted" begin
solution_ml = sem_fit(model_ml_multigroup)
@test all(test_fitmeasures(
fit_measures(solution_ml),
solution_lav[:fitmeasures_ml]; rtol = 1e-2, atol = 1e-7))

update_partable!(
partable_s, identifier(model_ml_multigroup), se_hessian(solution_ml), :se)
@test compare_estimates(
partable_s,
solution_lav[:parameter_estimates_ml]; atol = 1e-3,
col = :se, lav_col = :se,
lav_groups = Dict(:Pasteur => 1, :Grant_White => 2))
end

@testset "sorted | LowerTriangular A" begin
@test imply(model_ml_multigroup.sems[2]).A isa LowerTriangular
end

############################################################################################
# ML estimation - user defined loss function
############################################################################################
2 changes: 1 addition & 1 deletion test/examples/multigroup/multigroup.jl
@@ -1,5 +1,5 @@
using StructuralEquationModels, Test, FiniteDiff
import LinearAlgebra: diagind
import LinearAlgebra: diagind, LowerTriangular
# import StructuralEquationModels as SEM
include(
joinpath(chop(dirname(pathof(StructuralEquationModels)), tail = 3),
2 changes: 1 addition & 1 deletion test/examples/political_democracy/constraints.jl
@@ -61,6 +61,6 @@ end
solution_constrained = sem_fit(model_ml_constrained)
@test solution_constrained.solution[31]*solution_constrained.solution[30] >= 0.6
@test all(abs.(solution_constrained.solution) .< 10)
@test_skip solution_constrained.optimization_result.result[3] == :FTOL_REACHED
@test solution_constrained.optimization_result.result[3] == :FTOL_REACHED skip=true
@test abs(solution_constrained.minimum - 21.21) < 0.01
end
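The replaced line uses the `skip` keyword of the `Test` standard library's `@test` macro (available since Julia 1.7), which records the test as skipped (Broken) instead of evaluating it; a minimal sketch with a hypothetical condition:

```julia
using Test

# `skip=true` records the test as Broken without evaluating the expression.
@test 1 + 1 == 3 skip=true
```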