13 changes: 6 additions & 7 deletions Project.toml
@@ -1,20 +1,19 @@
name = "EllipticalSliceSampling"
uuid = "cad2338a-1db2-11e9-3401-43bc07c9ede2"
authors = ["David Widmann <dev+github@devmotion.de>"]
version = "0.1.0"
authors = ["David Widmann <dev+git@devmotion.de>"]
version = "0.2.0"

[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
Parameters = "d96e819e-fc66-5662-9728-84c9c7592b0a"
ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"

[compat]
AbstractMCMC = "0.5"
ArrayInterface = "2"
Distributions = "0.21.8"
Parameters = "0.12"
ProgressLogging = "0.1"
Distributions = "0.22"
julia = "1"

[extras]
18 changes: 12 additions & 6 deletions README.md
@@ -24,18 +24,22 @@ priors with non-zero means and handle the change of variables internally.

Probably most users would like to use the exported function
```julia
ESS_mcmc([rng::AbstracRNG, ]prior, loglikelihood, N::Int[; burnin::Int = 0])
ESS_mcmc([rng, ]prior, loglikelihood, N[; kwargs...])
```
which returns a vector of `N` samples for approximating the posterior of
a model with a Gaussian prior that allows sampling from the `prior` and
evaluation of the log likelihood `loglikelihood`. The burn-in phase with
`burnin` samples is discarded.
evaluation of the log likelihood `loglikelihood`.
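For illustration, here is a minimal usage sketch (not part of this diff): it assumes a Distributions.jl prior, a plain Julia function for the log likelihood, and made-up observations and noise level.
```julia
using Distributions
using EllipticalSliceSampling
using Random

# hypothetical toy model: 2-dimensional Gaussian prior, Gaussian observation noise
prior = MvNormal(zeros(2), ones(2))         # standard normal prior (vector of std. devs.)
data = [1.0, 0.5]                           # made-up observations
loglik(f) = logpdf(MvNormal(f, 0.5), data)  # log likelihood of the latent `f`

# draw 1_000 samples approximating the posterior
samples = ESS_mcmc(Random.GLOBAL_RNG, prior, loglik, 1_000)
```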

If you want to have more control about the sampling procedure (e.g., if you
only want to save a subset of samples or want to use another stopping
criterion), the function
```julia
ESS_mcmc_sampler([rng::AbstractRNG, ]prior, loglikelihood)
AbstractMCMC.steps!(
[rng,]
EllipticalSliceSampling.Model(prior, loglikelihood),
EllipticalSliceSampling.EllipticalSliceSampler();
kwargs...
)
```
gives you access to an iterator from which you can generate an unlimited
number of samples.
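As a hedged sketch of driving that iterator by hand (reusing `prior` and `loglik` from the sketch above, and assuming the iterator yields the sampler states defined in `src/abstractmcmc.jl`, each carrying a `sample` field):
```julia
import AbstractMCMC
using EllipticalSliceSampling
using Random

iter = AbstractMCMC.steps!(
    Random.GLOBAL_RNG,
    EllipticalSliceSampling.Model(prior, loglik),
    EllipticalSliceSampling.EllipticalSliceSampler(),
)

# custom thinning: keep every 10th of the first 1_000 states
states = collect(Iterators.take(iter, 1_000))
kept = [state.sample for state in states[1:10:end]]
```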
@@ -56,9 +60,11 @@ use a custom distribution type `GaussianPrior`, the following methods should be
implemented:
```julia
# state that the distribution is actually Gaussian
EllipticalSliceSampling.isnormal(::Type{<:GaussianPrior}) = true
EllipticalSliceSampling.isgaussian(::Type{<:GaussianPrior}) = true

# define the mean of the distribution
# alternatively implement `proposal(prior, ...)` and
# `proposal!(out, prior, ...)` (only if the samples are mutable)
Statistics.mean(dist::GaussianPrior) = ...

# define how to sample from the distribution
@@ -87,7 +93,7 @@ be useful.
### Progress monitor

If you use a package such as [Juno](https://junolab.org/) or
[ConsoleProgressMonitor.jl](https://github.com/tkf/ConsoleProgressMonitor.jl) that supports
[TerminalLoggers.jl](https://github.com/c42f/TerminalLoggers.jl) that supports
progress logs created by the
[ProgressLogging.jl](https://github.com/JunoLab/ProgressLogging.jl) API, then you can
monitor the progress of the sampling algorithm.
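A brief sketch of wiring this up in a terminal session (assuming TerminalLoggers.jl is installed and that progress logging is enabled by default in `AbstractMCMC.sample`; `prior` and `loglik` as in the first sketch):
```julia
using Logging: global_logger
using TerminalLoggers: TerminalLogger

# route progress logs to a terminal progress bar
global_logger(TerminalLogger())

samples = ESS_mcmc(prior, loglik, 1_000)  # a progress bar is shown while sampling
```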
18 changes: 9 additions & 9 deletions src/EllipticalSliceSampling.jl
@@ -1,17 +1,17 @@
module EllipticalSliceSampling

using ArrayInterface
using Distributions
using Parameters
using ProgressLogging
import AbstractMCMC
import ArrayInterface
import Distributions

using Random
import Random
import Statistics

export ESS_mcmc, ESS_mcmc_sampler
export ESS_mcmc

include("utils.jl")
include("types.jl")
include("iterator.jl")
include("abstractmcmc.jl")
include("model.jl")
include("distributions.jl")
include("interface.jl")

end # module
106 changes: 106 additions & 0 deletions src/abstractmcmc.jl
@@ -0,0 +1,106 @@
# elliptical slice sampler
struct EllipticalSliceSampler <: AbstractMCMC.AbstractSampler end

# state of the elliptical slice sampler
struct EllipticalSliceSamplerState{S,L}
"Sample of the elliptical slice sampler."
sample::S
"Log-likelihood of the sample."
loglikelihood::L
end

# first step of the elliptical slice sampler
function AbstractMCMC.step!(
rng::Random.AbstractRNG,
model::AbstractMCMC.AbstractModel,
::EllipticalSliceSampler,
N::Integer,
::Nothing;
kwargs...
)
# initial sample from the Gaussian prior
f = initial_sample(rng, model)

# compute log-likelihood of the initial sample
loglikelihood = Distributions.loglikelihood(model, f)

return EllipticalSliceSamplerState(f, loglikelihood)
end

# subsequent steps of the elliptical slice sampler
function AbstractMCMC.step!(
rng::Random.AbstractRNG,
model::AbstractMCMC.AbstractModel,
::EllipticalSliceSampler,
N::Integer,
state::EllipticalSliceSamplerState;
kwargs...
)
# sample from Gaussian prior
ν = sample_prior(rng, model)

# sample log-likelihood threshold
loglikelihood = state.loglikelihood
threshold = loglikelihood - Random.randexp(rng)

# sample initial angle
θ = 2 * π * rand(rng)
θmin = θ - 2 * π
θmax = θ

# compute the proposal
f = state.sample
fnext = proposal(model, f, ν, θ)

# compute the log-likelihood of the proposal
loglikelihood = Distributions.loglikelihood(model, fnext)

# stop if the log-likelihood threshold is reached
while loglikelihood < threshold
# shrink the bracket
if θ < zero(θ)
θmin = θ
else
θmax = θ
end

# sample angle
θ = θmin + rand(rng) * (θmax - θmin)

# recompute the proposal
if ArrayInterface.ismutable(fnext)
proposal!(fnext, model, f, ν, θ)
else
fnext = proposal(model, f, ν, θ)
end

# compute the log-likelihood of the proposal
loglikelihood = Distributions.loglikelihood(model, fnext)
end

return EllipticalSliceSamplerState(fnext, loglikelihood)
end

# only save the samples by default
function AbstractMCMC.transitions_init(
state::EllipticalSliceSamplerState,
model::AbstractMCMC.AbstractModel,
::EllipticalSliceSampler,
N::Integer;
kwargs...
)
return Vector{typeof(state.sample)}(undef, N)
end

function AbstractMCMC.transitions_save!(
samples::AbstractVector{S},
iteration::Integer,
state::EllipticalSliceSamplerState{S},
model::AbstractMCMC.AbstractModel,
::EllipticalSliceSampler,
N::Integer;
kwargs...
) where S
samples[iteration] = state.sample
return
end
54 changes: 54 additions & 0 deletions src/distributions.jl
@@ -0,0 +1,54 @@
# define the element type of the samples
randtype(::Type{D}) where {D<:Distributions.MultivariateDistribution} = Vector{eltype(D)}
randtype(::Type{D}) where {D<:Distributions.MatrixDistribution} = Matrix{eltype(D)}
function randtype(
::Type{D}
) where {D<:Distributions.Sampleable{Distributions.Multivariate}}
return Vector{eltype(D)}
end
function randtype(
::Type{D}
) where {D<:Distributions.Sampleable{Distributions.Matrixvariate}}
return Matrix{eltype(D)}
end

# define trait for Gaussian distributions
isgaussian(::Type{<:Distributions.Normal}) = true
isgaussian(::Type{<:Distributions.NormalCanon}) = true
isgaussian(::Type{<:Distributions.AbstractMvNormal}) = true

# compute the proposal of the next sample
function proposal(prior::Distributions.Normal, f::Real, ν::Real, θ)
sinθ, cosθ = sincos(θ)
μ = prior.μ
if iszero(μ)
return cosθ * f + sinθ * ν
else
a = 1 - (sinθ + cosθ)
return cosθ * f + sinθ * ν + a * μ
end
end

function proposal(
prior::Distributions.MvNormal,
f::AbstractVector{<:Real},
ν::AbstractVector{<:Real},
θ
)
sinθ, cosθ = sincos(θ)
a = 1 - (sinθ + cosθ)
return @. cosθ * f + sinθ * ν + a * prior.μ
end

function proposal!(
out::AbstractVector{<:Real},
prior::Distributions.MvNormal,
f::AbstractVector{<:Real},
ν::AbstractVector{<:Real},
θ
)
sinθ, cosθ = sincos(θ)
a = 1 - (sinθ + cosθ)
@. out = cosθ * f + sinθ * ν + a * prior.μ
return out
end
95 changes: 67 additions & 28 deletions src/interface.jl
@@ -1,33 +1,72 @@
# perform elliptical slice sampling for a fixed number of iterations
ESS_mcmc(prior, loglikelihood, N::Int; kwargs...) =
ESS_mcmc(Random.GLOBAL_RNG, prior, loglikelihood, N; kwargs...)
# public interface

function ESS_mcmc(rng::AbstractRNG, prior, loglikelihood, N::Int; burnin::Int = 0)
# define the internal model
"""
ESS_mcmc([rng, ]prior, loglikelihood, N; kwargs...)

Create a Markov chain of `N` samples for a model with given `prior` and `loglikelihood`
functions using the elliptical slice sampling algorithm.
"""
function ESS_mcmc(
rng::Random.AbstractRNG,
prior,
loglikelihood,
N::Integer;
kwargs...
)
model = Model(prior, loglikelihood)
return AbstractMCMC.sample(rng, model, EllipticalSliceSampler(), N; kwargs...)
end

function ESS_mcmc(prior, loglikelihood, N::Integer; kwargs...)
return ESS_mcmc(Random.GLOBAL_RNG, prior, loglikelihood, N; kwargs...)
end

# private interface

"""
initial_sample(rng, model)

Return the initial sample for the `model` using the random number generator `rng`.

# create the sampler
sampler = EllipticalSliceSampler(rng, model)

# create MCMC chain
chain = Vector{eltype(sampler)}(undef, N)
niters = N + burnin
@withprogress name = "Performing elliptical slice sampling" begin
# discard burnin phase
for (i, _) in zip(1:burnin, sampler)
@logprogress i / niters
end

for (i, f) in zip(1:N, sampler)
@inbounds chain[i] = f
@logprogress (i + burnin) / niters
end
end

chain
By default, sample from the prior by calling [`sample_prior(rng, model)`](@ref).
"""
function initial_sample(rng::Random.AbstractRNG, model::AbstractMCMC.AbstractModel)
return sample_prior(rng, model)
end

# create an elliptical slice sampler
ESS_mcmc_sampler(prior, loglikelihood) = ESS_mcmc_sampler(Random.GLOBAL_RNG, prior, loglikelihood)
ESS_mcmc_sampler(rng::AbstractRNG, prior, loglikelihood) =
EllipticalSliceSampler(rng, Model(prior, loglikelihood))
"""
sample_prior(rng, model)

Sample from the prior of the `model` using the random number generator `rng`.
"""
function sample_prior(::Random.AbstractRNG, ::AbstractMCMC.AbstractModel) end

"""
proposal(model, f, ν, θ)

Compute the proposal for the next sample in the elliptical slice sampling algorithm for the
`model` from the previous sample `f`, the sample `ν` from the Gaussian prior, and the angle
`θ`.

Mathematically, the proposal can be computed as
```math
f \\cos θ + ν \\sin θ + μ (1 - \\sin θ - \\cos θ),
```
where ``μ`` is the mean of the Gaussian prior.
"""
function proposal(model::AbstractMCMC.AbstractModel, f, ν, θ) end

"""
proposal!(out, model, f, ν, θ)

Compute the proposal for the next sample in the elliptical slice sampling algorithm for the
`model` from the previous sample `f`, the sample `ν` from the Gaussian prior, and the angle
`θ`, and save it to `out`.

Mathematically, the proposal can be computed as
```math
f \\cos θ + ν \\sin θ + μ (1 - \\sin θ - \\cos θ),
```
where ``μ`` is the mean of the Gaussian prior.
"""
function proposal!(out, model::AbstractMCMC.AbstractModel, f, ν, θ) end