diff --git a/Turing/src/contrib/inference/AdvancedSMCExtensions.jl b/Turing/src/contrib/inference/AdvancedSMCExtensions.jl
new file mode 100644
index 00000000..921662b6
--- /dev/null
+++ b/Turing/src/contrib/inference/AdvancedSMCExtensions.jl
@@ -0,0 +1,326 @@
+
+####
+#### Particle marginal Metropolis-Hastings sampler.
+####
+
+"""
+    PMMH(n_iters::Int, smc_alg::SMC, parameters_algs::Tuple{MH})
+
+Particle independent Metropolis–Hastings and
+particle marginal Metropolis–Hastings samplers.
+
+Note that this method is particle-based, and arrays of variables
+must be stored in a [`TArray`](@ref) object.
+
+Usage:
+
+```julia
+alg = PMMH(100, SMC(20, :v1), MH(1, :v2))
+alg = PMMH(100, SMC(20, :v1), MH(1, (:v2, (x) -> Normal(x, 1))))
+```
+
+Arguments:
+
+- `n_iters::Int` : Number of iterations to run.
+- `smc_alg::SMC` : An [`SMC`](@ref) algorithm to use.
+- `parameters_algs::Tuple{MH}` : An [`MH`](@ref) algorithm, which includes a
+  sample space specification.
+"""
+mutable struct PMMH{space, A<:Tuple} <: InferenceAlgorithm
+    n_iters::Int # number of iterations
+    algs::A      # proposals for state & parameters
+end
+function PMMH(n_iters::Int, algs::Tuple, space::Tuple)
+    return PMMH{space, typeof(algs)}(n_iters, algs)
+end
+function PMMH(n_iters::Int, smc_alg::SMC, parameter_algs...)
+    return PMMH(n_iters, tuple(parameter_algs..., smc_alg), ())
+end
+
+PIMH(n_iters::Int, smc_alg::SMC) = PMMH(n_iters, tuple(smc_alg), ())
+
+function Sampler(alg::PMMH, model::Model, s::Selector)
+    info = Dict{Symbol, Any}()
+    spl = Sampler(alg, info, s)
+
+    alg_str = "PMMH"
+    n_samplers = length(alg.algs)
+    samplers = Array{Sampler}(undef, n_samplers)
+
+    space = Set{Symbol}()
+
+    for i in 1:n_samplers
+        sub_alg = alg.algs[i]
+        if isa(sub_alg, Union{SMC, MH})
+            samplers[i] = Sampler(sub_alg, model, Selector(Symbol(typeof(sub_alg))))
+        else
+            error("[$alg_str] unsupported base sampling algorithm $alg")
+        end
+        if typeof(sub_alg) == MH && sub_alg.n_iters != 1
+            @warn(
+                "[$alg_str] number of iterations greater than 1 " *
+                "is useless for MH since it is only used for its proposal"
+            )
+        end
+        space = union(space, sub_alg.space)
+    end
+
+    info[:old_likelihood_estimate] = -Inf # force acceptance of the first proposal
+    info[:old_prior_prob] = 0.0
+    info[:samplers] = samplers
+
+    return spl
+end
+
+function step(model, spl::Sampler{<:PMMH}, vi::VarInfo, is_first::Bool)
+    violating_support = false
+    proposal_ratio = 0.0
+    new_prior_prob = 0.0
+    new_likelihood_estimate = 0.0
+    old_θ = copy(vi[spl])
+
+    @debug "Propose new parameters from proposals..."
+    for local_spl in spl.info[:samplers][1:end-1]
+        @debug "$(typeof(local_spl)) proposing $(local_spl.alg.space)..."
+        propose(model, local_spl, vi)
+        if local_spl.info[:violating_support]
+            violating_support = true
+            break
+        end
+        new_prior_prob += local_spl.info[:prior_prob]
+        proposal_ratio += local_spl.info[:proposal_ratio]
+    end
+
+    if violating_support
+        # do not run SMC if the proposal will be rejected anyway
+        accepted = false
+    else
+        @debug "Propose new state with SMC..."
+        vi, _ = step(model, spl.info[:samplers][end], vi)
+        new_likelihood_estimate = spl.info[:samplers][end].info[:logevidence][end]
+
+        @debug "Decide whether to accept..."
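+        # Accept or reject with the usual MH rule, with the intractable likelihood
+        # replaced by the unbiased SMC estimate of the evidence; by the
+        # pseudo-marginal argument (Andrieu, Doucet & Holenstein, 2010) the chain
+        # still targets the exact posterior.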
+        accepted = mh_accept(
+            spl.info[:old_likelihood_estimate] + spl.info[:old_prior_prob],
+            new_likelihood_estimate + new_prior_prob,
+            proposal_ratio,
+        )
+    end
+
+    if accepted
+        spl.info[:old_likelihood_estimate] = new_likelihood_estimate
+        spl.info[:old_prior_prob] = new_prior_prob
+    else # rejected
+        vi[spl] = old_θ
+    end
+
+    return vi, accepted
+end
+
+function sample( model::Model,
+                 alg::PMMH;
+                 save_state=false,      # flag for state saving
+                 resume_from=nothing,   # chain to continue
+                 reuse_spl_n=0          # flag for spl re-using
+               )
+
+    spl = Sampler(alg, model)
+    if resume_from !== nothing
+        spl.selector = resume_from.info[:spl].selector
+    end
+    alg_str = "PMMH"
+
+    # Number of samples to store
+    sample_n = spl.alg.n_iters
+
+    # Init samples
+    time_total = zero(Float64)
+    samples = Array{Sample}(undef, sample_n)
+    weight = 1 / sample_n
+    for i = 1:sample_n
+        samples[i] = Sample(weight, Dict{Symbol, Any}())
+    end
+
+    # Init parameters
+    vi = if resume_from === nothing
+        VarInfo(model)
+    else
+        resume_from.info[:vi]
+    end
+    n = spl.alg.n_iters
+
+    # PMMH steps
+    accept_his = Bool[]
+    PROGRESS[] && (spl.info[:progress] = ProgressMeter.Progress(n, 1, "[$alg_str] Sampling...", 0))
+    for i = 1:n
+        @debug "$alg_str stepping..."
+        time_elapsed = @elapsed vi, is_accept = step(model, spl, vi, i==1)
+
+        if is_accept # accepted => store the new predicts
+            samples[i].value = Sample(vi, spl).value
+        else         # rejected => store the previous predicts
+            samples[i] = samples[i - 1]
+        end
+
+        time_total += time_elapsed
+        push!(accept_his, is_accept)
+        if PROGRESS[]
+            haskey(spl.info, :progress) && ProgressMeter.update!(spl.info[:progress], spl.info[:progress].counter + 1)
+        end
+    end
+
+    println("[$alg_str] Finished with")
+    println("  Running time = $time_total;")
+    accept_rate = sum(accept_his) / n  # compute the acceptance rate
+    println("  Accept rate  = $accept_rate;")
+
+    if resume_from !== nothing # concatenate samples
+        pushfirst!(samples, resume_from.info[:samples]...)
+    end
+    c = Chain(-Inf, samples) # wrap the result in a Chain
+
+    if save_state # save state
+        c = save(c, spl, model, vi, samples)
+    end
+
+    c
+end
+
+
+####
+#### IPMCMC sampler.
+####
+
+"""
+    IPMCMC(n_particles::Int, n_iters::Int, n_nodes::Int, n_csmc_nodes::Int)
+
+Interacting particle Markov chain Monte Carlo (IPMCMC) sampler.
+
+Note that this method is particle-based, and arrays of variables
+must be stored in a [`TArray`](@ref) object.
+
+Usage:
+
+```julia
+IPMCMC(100, 100, 4, 2)
+```
+
+Arguments:
+
+- `n_particles::Int` : Number of particles to use.
+- `n_iters::Int` : Number of iterations to run.
+- `n_nodes::Int` : The number of nodes running SMC and CSMC.
+- `n_csmc_nodes::Int` : The number of CSMC nodes.
+
+A paper on this can be found [here](https://arxiv.org/abs/1602.05128).
+"""
+mutable struct IPMCMC{T, F} <: InferenceAlgorithm
+    n_particles::Int  # number of particles used
+    n_iters::Int      # number of iterations
+    n_nodes::Int      # number of nodes running SMC and CSMC
+    n_csmc_nodes::Int # number of CSMC nodes
+    resampler::F      # function to resample
+    space::Set{T}     # sampling space, empty means all
+end
+IPMCMC(n1::Int, n2::Int) = IPMCMC(n1, n2, 32, 16, resample_systematic, Set())
+IPMCMC(n1::Int, n2::Int, n3::Int) = IPMCMC(n1, n2, n3, Int(ceil(n3/2)), resample_systematic, Set())
+IPMCMC(n1::Int, n2::Int, n3::Int, n4::Int) = IPMCMC(n1, n2, n3, n4, resample_systematic, Set())
+function IPMCMC(n1::Int, n2::Int, n3::Int, n4::Int, space...)
+    _space = Set(space) # `space` is always a tuple of symbols here
+    IPMCMC(n1, n2, n3, n4, resample_systematic, _space)
+end
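+
+# For example, `IPMCMC(10, 50)` runs 50 iterations with 10 particles on each of 32
+# nodes, half of which are CSMC nodes, and `IPMCMC(10, 50, 4)` uses 4 nodes with
+# `ceil(4 / 2) = 2` CSMC nodes.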
+
+function Sampler(alg::IPMCMC, s::Selector)
+    info = Dict{Symbol, Any}()
+    spl = Sampler(alg, info, s)
+    # Create SMC and CSMC nodes
+    samplers = Array{Sampler}(undef, alg.n_nodes)
+    # Use resampler_threshold=1.0 for SMC since adaptive resampling is invalid in this setting
+    default_CSMC = CSMC(alg.n_particles, 1, alg.resampler, alg.space)
+    default_SMC = SMC(alg.n_particles, alg.resampler, 1.0, false, alg.space)
+
+    for i in 1:alg.n_csmc_nodes
+        samplers[i] = Sampler(default_CSMC, Selector(Symbol(typeof(default_CSMC))))
+    end
+    for i in (alg.n_csmc_nodes+1):alg.n_nodes
+        samplers[i] = Sampler(default_SMC, Selector(Symbol(typeof(default_SMC))))
+    end
+
+    info[:samplers] = samplers
+
+    return spl
+end
+
+function step(model, spl::Sampler{<:IPMCMC}, VarInfos::Array{VarInfo}, is_first::Bool)
+    # Initialise array for marginal likelihood estimators
+    log_zs = zeros(spl.alg.n_nodes)
+
+    # Run SMC & CSMC nodes
+    for j in 1:spl.alg.n_nodes
+        reset_num_produce!(VarInfos[j])
+        VarInfos[j] = step(model, spl.info[:samplers][j], VarInfos[j])[1]
+        log_zs[j] = spl.info[:samplers][j].info[:logevidence][end]
+    end
+
+    # Resample the indices of the CSMC nodes
+    conditional_nodes_indices = collect(1:spl.alg.n_csmc_nodes)
+    unconditional_nodes_indices = collect(spl.alg.n_csmc_nodes+1:spl.alg.n_nodes)
+    for j in 1:spl.alg.n_csmc_nodes
+        # Select a new conditional node by simulating c_j
+        log_ksi = vcat(log_zs[unconditional_nodes_indices], log_zs[j])
+        ksi = exp.(log_ksi .- maximum(log_ksi))
+        c_j = wsample(ksi) # sample from a categorical distribution with unnormalized weights
+
+        if c_j < length(log_ksi) # if the CSMC node selects an index other than itself
+            conditional_nodes_indices[j] = unconditional_nodes_indices[c_j]
+            unconditional_nodes_indices[c_j] = j
+        end
+    end
+    nodes_permutation = vcat(conditional_nodes_indices, unconditional_nodes_indices)
+
+    VarInfos[nodes_permutation]
+end
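+
+# After every sweep the CSMC node indices are refreshed with a Gibbs step: each
+# CSMC node either keeps its own retained trajectory or swaps roles with one of
+# the SMC nodes, chosen with probability proportional to the nodes' evidence
+# estimates (the `wsample` call on the shifted log-evidences above).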
+
+function sample(model::Model, alg::IPMCMC)
+
+    spl = Sampler(alg)
+
+    # Number of samples to store
+    sample_n = alg.n_iters * alg.n_csmc_nodes
+
+    # Init samples
+    time_total = zero(Float64)
+    samples = Array{Sample}(undef, sample_n)
+    weight = 1 / sample_n
+    for i = 1:sample_n
+        samples[i] = Sample(weight, Dict{Symbol, Any}())
+    end
+
+    # Init parameters
+    vi = empty!(VarInfo(model))
+    VarInfos = Array{VarInfo}(undef, spl.alg.n_nodes)
+    for j in 1:spl.alg.n_nodes
+        VarInfos[j] = deepcopy(vi)
+    end
+    n = spl.alg.n_iters
+
+    # IPMCMC steps
+    if PROGRESS[]
+        spl.info[:progress] = ProgressMeter.Progress(n, 1, "[IPMCMC] Sampling...", 0)
+    end
+    for i = 1:n
+        @debug "IPMCMC stepping..."
+        time_elapsed = @elapsed VarInfos = step(model, spl, VarInfos, i==1)
+
+        # Save the retained path of each CSMC node as a sample
+        for j in 1:spl.alg.n_csmc_nodes
+            samples[(i-1)*alg.n_csmc_nodes+j].value = Sample(VarInfos[j], spl).value
+        end
+
+        time_total += time_elapsed
+        if PROGRESS[]
+            haskey(spl.info, :progress) && ProgressMeter.update!(spl.info[:progress], spl.info[:progress].counter + 1)
+        end
+    end
+
+    println("[IPMCMC] Finished with")
+    println("  Running time = $time_total;")
+
+    Chain(0.0, samples) # wrap the result in a Chain
+end
diff --git a/Turing/src/core/container.jl b/Turing/src/core/container.jl
new file mode 100644
index 00000000..35353f01
--- /dev/null
+++ b/Turing/src/core/container.jl
@@ -0,0 +1,360 @@
+mutable struct Trace{Tspl<:AbstractSampler, Tvi<:AbstractVarInfo, Tmodel<:Model}
+    model::Tmodel
+    spl::Tspl
+    vi::Tvi
+    ctask::CTask
+
+    function Trace{SampleFromPrior}(model::Model, spl::AbstractSampler, vi::AbstractVarInfo)
+        return new{SampleFromPrior,typeof(vi),typeof(model)}(model, SampleFromPrior(), vi)
+    end
+    function Trace{S}(model::Model, spl::S, vi::AbstractVarInfo) where S<:Sampler
+        return new{S,typeof(vi),typeof(model)}(model, spl, vi)
+    end
+end
+
+function Base.copy(trace::Trace)
+    vi = deepcopy(trace.vi)
+    res = Trace{typeof(trace.spl)}(trace.model, trace.spl, vi)
+    res.ctask = copy(trace.ctask)
+    return res
+end
+
+# NOTE: this function is called by `forkr`
+function Trace(f, m::Model, spl::AbstractSampler, vi::AbstractVarInfo)
+    res = Trace{typeof(spl)}(m, spl, deepcopy(vi))
+    ctask = CTask() do
+        res = f()
+        produce(nothing)
+        return res
+    end
+    task = ctask.task
+    if task.storage === nothing
+        task.storage = IdDict()
+    end
+    task.storage[:turing_trace] = res # create a backward reference in task_local_storage
+    res.ctask = ctask
+    return res
+end
+
+function Trace(m::Model, spl::AbstractSampler, vi::AbstractVarInfo)
+    res = Trace{typeof(spl)}(m, spl, deepcopy(vi))
+    reset_num_produce!(res.vi)
+    ctask = CTask() do
+        res = m(vi, spl)
+        produce(nothing)
+        return res
+    end
+    task = ctask.task
+    if task.storage === nothing
+        task.storage = IdDict()
+    end
+    task.storage[:turing_trace] = res # create a backward reference in task_local_storage
+    res.ctask = ctask
+    return res
+end
+
+# step to the next observe statement and return the log likelihood
+Libtask.consume(t::Trace) = (increment_num_produce!(t.vi); consume(t.ctask))
+
+# Task copying version of fork for Trace.
+function fork(trace::Trace, is_ref::Bool = false)
+    newtrace = copy(trace)
+    is_ref && set_retained_vns_del_by_spl!(newtrace.vi, newtrace.spl)
+    newtrace.ctask.task.storage[:turing_trace] = newtrace
+    return newtrace
+end
+
+# PG requires keeping all randomness for the reference particle
+# Create new task and copy randomness
+function forkr(trace::Trace)
+    newtrace = Trace(trace.ctask.task.code, trace.model, trace.spl, deepcopy(trace.vi))
+    newtrace.spl = trace.spl
+    reset_num_produce!(newtrace.vi)
+    return newtrace
+end
+
+current_trace() = current_task().storage[:turing_trace]
+
+const Particle = Trace
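+
+# A minimal sketch of the `Trace` API (with `f` a hypothetical task function that
+# `produce`s a log-likelihood at every observation, as in the container tests):
+#
+#     t = Trace(f, model, spl, VarInfo())
+#     logweight = consume(t)  # run to the next observe statement
+#     t2 = fork(t)            # copy the task so both particles continue independently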
+
+"""
+Data structure for particle filters.
+
+- `effectiveSampleSize(pc::ParticleContainer)`
+- `normalise!(pc::ParticleContainer)`
+- `consume(pc::ParticleContainer)`: return incremental likelihood
+"""
+mutable struct ParticleContainer{T<:Particle}
+    "Particles."
+    vals::Vector{T}
+    "Unnormalized logarithmic weights."
+    logWs::Vector{Float64}
+end
+
+function ParticleContainer(particles::Vector{<:Particle})
+    return ParticleContainer(particles, zeros(length(particles)))
+end
+
+Base.collect(pc::ParticleContainer) = pc.vals
+Base.length(pc::ParticleContainer) = length(pc.vals)
+Base.@propagate_inbounds Base.getindex(pc::ParticleContainer, i::Int) = pc.vals[i]
+
+# registers a new x-particle in the container
+function Base.push!(pc::ParticleContainer, p::Particle)
+    push!(pc.vals, p)
+    push!(pc.logWs, 0.0)
+    pc
+end
+
+# clones a theta-particle
+function Base.copy(pc::ParticleContainer)
+    # fork particles
+    vals = eltype(pc.vals)[fork(p) for p in pc.vals]
+
+    # copy weights
+    logWs = copy(pc.logWs)
+
+    ParticleContainer(vals, logWs)
+end
+
+"""
+    reset_logweights!(pc::ParticleContainer)
+
+Reset all unnormalized logarithmic weights to zero.
+"""
+function reset_logweights!(pc::ParticleContainer)
+    fill!(pc.logWs, 0.0)
+    return pc
+end
+
+"""
+    increase_logweight!(pc::ParticleContainer, i::Int, x)
+
+Increase the unnormalized logarithmic weight of the `i`th particle by `x`.
+"""
+function increase_logweight!(pc::ParticleContainer, i, logw)
+    pc.logWs[i] += logw
+    return pc
+end
+
+"""
+    getweights(pc::ParticleContainer)
+
+Compute the normalized weights of the particles.
+"""
+getweights(pc::ParticleContainer) = softmax(pc.logWs)
+
+"""
+    getweight(pc::ParticleContainer, i)
+
+Compute the normalized weight of the `i`th particle.
+"""
+getweight(pc::ParticleContainer, i) = exp(pc.logWs[i] - logZ(pc))
+
+"""
+    logZ(pc::ParticleContainer)
+
+Return the logarithm of the normalizing constant of the unnormalized logarithmic weights.
+"""
+logZ(pc::ParticleContainer) = logsumexp(pc.logWs)
+
+"""
+    effectiveSampleSize(pc::ParticleContainer)
+
+Compute the effective sample size ``1 / ∑ wᵢ²``, where ``wᵢ`` are the normalized weights.
+"""
+function effectiveSampleSize(pc::ParticleContainer)
+    Ws = getweights(pc)
+    return inv(sum(abs2, Ws))
+end
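+
+# Worked example of the weight bookkeeping (mirroring the container tests): for
+# three particles with `logWs == [0.0, -1.0, -2.0]`, `getweights` returns
+# `softmax(logWs) ≈ [0.665, 0.245, 0.090]`, `logZ` is `logsumexp(logWs) ≈ 0.408`,
+# and the effective sample size `1 / sum(abs2, w)` is about 1.96.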
+
+"""
+    resample_propagate!(pc::ParticleContainer[, randcat = resample_systematic, ref = nothing;
+                        weights = getweights(pc)])
+
+Resample and propagate the particles in `pc`.
+
+Function `randcat` is used for sampling ancestor indices from the categorical distribution
+of the particle `weights`. For Particle Gibbs sampling, one can provide a reference particle
+`ref` that is ensured to survive the resampling step.
+"""
+function resample_propagate!(
+    pc::ParticleContainer,
+    randcat = Turing.Inference.resample_systematic,
+    ref::Union{Particle, Nothing} = nothing;
+    weights = getweights(pc)
+)
+    # check that weights are not NaN
+    @assert !any(isnan, weights)
+
+    # sample ancestor indices
+    n = length(pc)
+    nresamples = ref === nothing ? n : n - 1
+    indx = randcat(weights, nresamples)
+
+    # count number of children for each particle
+    num_children = zeros(Int, n)
+    @inbounds for i in indx
+        num_children[i] += 1
+    end
+
+    # fork particles
+    particles = collect(pc)
+    children = similar(particles)
+    j = 0
+    @inbounds for i in 1:n
+        ni = num_children[i]
+
+        if ni > 0
+            # fork first child
+            pi = particles[i]
+            isref = pi === ref
+            p = isref ? fork(pi, isref) : pi
+            children[j += 1] = p
+
+            # fork additional children
+            for _ in 2:ni
+                children[j += 1] = fork(p, isref)
+            end
+        end
+    end
+
+    if ref !== nothing
+        # Insert the retained particle. This is based on the replaying trick for efficiency
+        # reasons. If we implement PG using task copying, we need to store Nx * T particles!
+        @inbounds children[n] = ref
+    end
+
+    # replace particles and log weights in the container with new particles and weights
+    pc.vals = children
+    reset_logweights!(pc)
+
+    pc
+end
+
+"""
+    reweight!(pc::ParticleContainer)
+
+Check if the final time step is reached, and otherwise reweight the particles by
+considering the next observation.
+"""
+function reweight!(pc::ParticleContainer)
+    n = length(pc)
+
+    particles = collect(pc)
+    numdone = 0
+    for i in 1:n
+        p = particles[i]
+
+        # Obtain ``\\log p(yₜ | y₁, …, yₜ₋₁, x₁, …, xₜ, θ₁, …, θₜ)``, or `nothing` if
+        # the execution of the model is finished.
+        # Here ``yᵢ`` are observations, ``xᵢ`` variables of the particle filter, and
+        # ``θᵢ`` are variables of other samplers.
+        score = Libtask.consume(p)
+
+        if score === nothing
+            numdone += 1
+        else
+            # Increase the unnormalized logarithmic weights, accounting for the variables
+            # of other samplers.
+            increase_logweight!(pc, i, score + getlogp(p.vi))
+
+            # Reset the accumulator of the log probability in the model so that we can
+            # accumulate log probabilities of variables of other samplers until the next
+            # observation.
+            resetlogp!(p.vi)
+        end
+    end
+
+    # Check if all particles are propagated to the final time point.
+    numdone == n && return true
+
+    # The posterior for models with random number of observations is not well-defined.
+    if numdone != 0
+        error("mis-aligned execution traces: # particles = ", n,
+              " # completed trajectories = ", numdone,
+              ". Please make sure the number of observations is NOT random.")
+    end
+
+    return false
+end
+
+"""
+    sweep!(pc::ParticleContainer, resampler)
+
+Perform a particle sweep and return an unbiased estimate of the log evidence.
+
+The resampling steps use the given `resampler`.
+
+# Reference
+
+Del Moral, P., Doucet, A., & Jasra, A. (2006). Sequential Monte Carlo samplers.
+Journal of the Royal Statistical Society: Series B (Statistical Methodology), 68(3), 411-436.
+"""
+function sweep!(pc::ParticleContainer, resampler)
+    # Initial step:
+
+    # Resample and propagate particles.
+    resample_propagate!(pc, resampler)
+
+    # Compute the current normalizing constant ``Z₀`` of the unnormalized logarithmic
+    # weights.
+    # Usually it is equal to the number of particles in the beginning but this
+    # implementation covers also the unlikely case of a particle container that is
+    # initialized with non-zero logarithmic weights.
+    logZ0 = logZ(pc)
+
+    # Reweight the particles by including the first observation ``y₁``.
+    isdone = reweight!(pc)
+
+    # Compute the normalizing constant ``Z₁`` after reweighting.
+    logZ1 = logZ(pc)
+
+    # Compute the estimate of the log evidence ``\\log p(y₁)``.
+    logevidence = logZ1 - logZ0
+
+    # For observations ``y₂, …, yₜ``:
+    while !isdone
+        # Resample and propagate particles.
+        resample_propagate!(pc, resampler)
+
+        # Compute the current normalizing constant ``Z₀`` of the unnormalized logarithmic
+        # weights.
+        logZ0 = logZ(pc)
+
+        # Reweight the particles by including the next observation ``yₜ``.
+        isdone = reweight!(pc)
+
+        # Compute the normalizing constant ``Z₁`` after reweighting.
+        logZ1 = logZ(pc)
+
+        # Compute the estimate of the log evidence ``\\log p(y₁, …, yₜ)``.
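+        # Each iteration contributes the increment ``\\log(Z₁/Z₀)``, so `logevidence`
+        # telescopes into an estimate of the full log evidence.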
+ logevidence += logZ1 - logZ0 + end + + return logevidence +end + +struct ResampleWithESSThreshold{R, T<:Real} + resampler::R + threshold::T +end + +function ResampleWithESSThreshold(resampler = Turing.Inference.resample_systematic) + ResampleWithESSThreshold(resampler, 0.5) +end + +function resample_propagate!( + pc::ParticleContainer, + resampler::ResampleWithESSThreshold, + ref::Union{Particle,Nothing} = nothing; + weights = getweights(pc) +) + # Compute the effective sample size ``1 / ∑ wᵢ²`` with normalized weights ``wᵢ`` + ess = inv(sum(abs2, weights)) + + if ess ≤ resampler.threshold * length(pc) + resample_propagate!(pc, resampler.resampler, ref; weights = weights) + end + + pc +end diff --git a/Turing/src/inference/AdvancedSMC.jl b/Turing/src/inference/AdvancedSMC.jl new file mode 100644 index 00000000..f2354b4d --- /dev/null +++ b/Turing/src/inference/AdvancedSMC.jl @@ -0,0 +1,502 @@ +### +### Particle Filtering and Particle MCMC Samplers. +### + +#### +#### Generic Sequential Monte Carlo sampler. +#### + +""" +$(TYPEDEF) + +Sequential Monte Carlo sampler. + +# Fields + +$(TYPEDFIELDS) +""" +struct SMC{space, R} <: ParticleInference + resampler::R +end + +""" + SMC(space...) + SMC([resampler = ResampleWithESSThreshold(), space = ()]) + SMC([resampler = resample_systematic, ]threshold[, space = ()]) + +Create a sequential Monte Carlo sampler of type [`SMC`](@ref) for the variables in `space`. + +If the algorithm for the resampling step is not specified explicitly, systematic resampling +is performed if the estimated effective sample size per particle drops below 0.5. +""" +function SMC(resampler = Turing.Core.ResampleWithESSThreshold(), space::Tuple = ()) + return SMC{space, typeof(resampler)}(resampler) +end + +# Convenient constructors with ESS threshold +function SMC(resampler, threshold::Real, space::Tuple = ()) + return SMC(Turing.Core.ResampleWithESSThreshold(resampler, threshold), space) +end +SMC(threshold::Real, space::Tuple = ()) = SMC(resample_systematic, threshold, space) + +# If only the space is defined +SMC(space::Symbol...) = SMC(space) +SMC(space::Tuple) = SMC(Turing.Core.ResampleWithESSThreshold(), space) + +struct SMCTransition{T,F<:AbstractFloat} + "The parameters for any given sample." + θ::T + "The joint log probability of the sample (NOTE: does not work, always set to zero)." + lp::F + "The weight of the particle the sample was retrieved from." + weight::F +end + +function SMCTransition(vi::AbstractVarInfo, weight) + theta = tonamedtuple(vi) + + # This is pretty useless since we reset the log probability continuously in the + # particle sweep. + lp = getlogp(vi) + + return SMCTransition(theta, lp, weight) +end + +metadata(t::SMCTransition) = (lp = t.lp, weight = t.weight) + +DynamicPPL.getlogp(t::SMCTransition) = t.lp + +struct SMCState{P,F<:AbstractFloat} + particles::P + particleindex::Int + # The logevidence after aggregating all samples together. + average_logevidence::F +end + +function getlogevidence(samples, sampler::Sampler{<:SMC}, state::SMCState) + return state.average_logevidence +end + +function AbstractMCMC.sample( + rng::AbstractRNG, + model::AbstractModel, + sampler::Sampler{<:SMC}, + N::Integer; + chain_type=MCMCChains.Chains, + resume_from=nothing, + progress=PROGRESS[], + kwargs... +) + if resume_from === nothing + return AbstractMCMC.mcmcsample(rng, model, sampler, N; + chain_type=chain_type, + progress=progress, + nparticles=N, + kwargs...) 
+ else + return resume(resume_from, N; + chain_type=chain_type, progress=progress, nparticles=N, kwargs...) + end +end + +function DynamicPPL.initialstep( + ::AbstractRNG, + model::AbstractModel, + spl::Sampler{<:SMC}, + vi::AbstractVarInfo; + nparticles::Int, + kwargs... +) + # Reset the VarInfo. + reset_num_produce!(vi) + set_retained_vns_del_by_spl!(vi, spl) + resetlogp!(vi) + empty!(vi) + + # Create a new set of particles. + T = Trace{typeof(spl),typeof(vi),typeof(model)} + particles = ParticleContainer(T[Trace(model, spl, vi) for _ in 1:nparticles]) + + # Perform particle sweep. + logevidence = sweep!(particles, spl.alg.resampler) + + # Extract the first particle and its weight. + particle = particles.vals[1] + weight = getweight(particles, 1) + + # Compute the first transition and the first state. + transition = SMCTransition(particle.vi, weight) + state = SMCState(particles, 2, logevidence) + + return transition, state +end + +function AbstractMCMC.step( + ::AbstractRNG, + model::AbstractModel, + spl::Sampler{<:SMC}, + state::SMCState; + kwargs... +) + # Extract the index of the current particle. + index = state.particleindex + + # Extract the current particle and its weight. + particles = state.particles + particle = particles.vals[index] + weight = getweight(particles, index) + + # Compute the transition and the next state. + transition = SMCTransition(particle.vi, weight) + nextstate = SMCState(state.particles, index + 1, state.average_logevidence) + + return transition, nextstate +end + +#### +#### Particle Gibbs sampler. +#### + +""" +$(TYPEDEF) + +Particle Gibbs sampler. + +Note that this method is particle-based, and arrays of variables +must be stored in a [`TArray`](@ref) object. + +# Fields + +$(TYPEDFIELDS) +""" +struct PG{space,R} <: ParticleInference + """Number of particles.""" + nparticles::Int + """Resampling algorithm.""" + resampler::R +end + +isgibbscomponent(::PG) = true + +""" + PG(n, space...) + PG(n, [resampler = ResampleWithESSThreshold(), space = ()]) + PG(n, [resampler = resample_systematic, ]threshold[, space = ()]) + +Create a Particle Gibbs sampler of type [`PG`](@ref) with `n` particles for the variables +in `space`. + +If the algorithm for the resampling step is not specified explicitly, systematic resampling +is performed if the estimated effective sample size per particle drops below 0.5. +""" +function PG( + nparticles::Int, + resampler = Turing.Core.ResampleWithESSThreshold(), + space::Tuple = (), +) + return PG{space, typeof(resampler)}(nparticles, resampler) +end + +# Convenient constructors with ESS threshold +function PG(nparticles::Int, resampler, threshold::Real, space::Tuple = ()) + return PG(nparticles, Turing.Core.ResampleWithESSThreshold(resampler, threshold), space) +end +function PG(nparticles::Int, threshold::Real, space::Tuple = ()) + return PG(nparticles, resample_systematic, threshold, space) +end + +# If only the number of particles and the space is defined +PG(nparticles::Int, space::Symbol...) = PG(nparticles, space) +function PG(nparticles::Int, space::Tuple) + return PG(nparticles, Turing.Core.ResampleWithESSThreshold(), space) +end + +const CSMC = PG # type alias of PG as Conditional SMC + +struct PGTransition{T,F<:AbstractFloat} + "The parameters for any given sample." + θ::T + "The joint log probability of the sample (NOTE: does not work, always set to zero)." + lp::F + "The log evidence of the sample." 
+ logevidence::F +end + +function PGTransition(vi::AbstractVarInfo, logevidence) + theta = tonamedtuple(vi) + + # This is pretty useless since we reset the log probability continuously in the + # particle sweep. + lp = getlogp(vi) + + return PGTransition(theta, lp, logevidence) +end + +metadata(t::PGTransition) = (lp = t.lp, logevidence = t.logevidence) + +DynamicPPL.getlogp(t::PGTransition) = t.lp + +function getlogevidence(samples, sampler::Sampler{<:PG}, vi::AbstractVarInfo) + return mean(x.logevidence for x in samples) +end + +function DynamicPPL.initialstep( + rng::AbstractRNG, + model::AbstractModel, + spl::Sampler{<:PG}, + vi::AbstractVarInfo; + kwargs... +) + # Reset the VarInfo before new sweep + reset_num_produce!(vi) + set_retained_vns_del_by_spl!(vi, spl) + resetlogp!(vi) + + # Create a new set of particles + num_particles = spl.alg.nparticles + T = Trace{typeof(spl),typeof(vi),typeof(model)} + particles = ParticleContainer(T[Trace(model, spl, vi) for _ in 1:num_particles]) + + # Perform a particle sweep. + logevidence = sweep!(particles, spl.alg.resampler) + + # Pick a particle to be retained. + Ws = getweights(particles) + indx = randcat(Ws) + reference = particles.vals[indx] + + # Compute the first transition. + _vi = reference.vi + transition = PGTransition(_vi, logevidence) + + return transition, _vi +end + +function AbstractMCMC.step( + ::AbstractRNG, + model::AbstractModel, + spl::Sampler{<:PG}, + vi::AbstractVarInfo; + kwargs... +) + # Reset the VarInfo before new sweep. + reset_num_produce!(vi) + set_retained_vns_del_by_spl!(vi, spl) + resetlogp!(vi) + + # Create a new set of particles. + num_particles = spl.alg.nparticles + T = Trace{typeof(spl),typeof(vi),typeof(model)} + x = Vector{T}(undef, num_particles) + @inbounds for i in 1:(num_particles - 1) + x[i] = Trace(model, spl, vi) + end + # Create reference particle. + @inbounds x[num_particles] = forkr(Trace(model, spl, vi)) + particles = ParticleContainer(x) + + # Perform a particle sweep. + logevidence = sweep!(particles, spl.alg.resampler) + + # Pick a particle to be retained. + Ws = getweights(particles) + indx = randcat(Ws) + newreference = particles.vals[indx] + + # Compute the transition. 
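+    # The VarInfo of the newly retained particle becomes the next state, so its
+    # trajectory serves as the reference particle of the following CSMC sweep.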
+    _vi = newreference.vi
+    transition = PGTransition(_vi, logevidence)
+
+    return transition, _vi
+end
+
+function DynamicPPL.assume(
+    rng,
+    spl::Sampler{<:Union{PG,SMC}},
+    dist::Distribution,
+    vn::VarName,
+    ::Any
+)
+    vi = current_trace().vi
+    if inspace(vn, spl)
+        if !haskey(vi, vn)
+            r = rand(rng, dist)
+            push!(vi, vn, r, dist, spl)
+        elseif is_flagged(vi, vn, "del")
+            unset_flag!(vi, vn, "del")
+            r = rand(rng, dist)
+            vi[vn] = vectorize(dist, r)
+            setgid!(vi, spl.selector, vn)
+            setorder!(vi, vn, get_num_produce(vi))
+        else
+            updategid!(vi, vn, spl)
+            r = vi[vn]
+        end
+    else # vn belongs to another sampler <=> conditioning on vn
+        if haskey(vi, vn)
+            r = vi[vn]
+        else
+            r = rand(rng, dist)
+            push!(vi, vn, r, dist, Selector(:invalid))
+        end
+        lp = logpdf_with_trans(dist, r, istrans(vi, vn))
+        acclogp!(vi, lp)
+    end
+    return r, 0
+end
+
+function DynamicPPL.observe(spl::Sampler{<:Union{PG,SMC}}, dist::Distribution, value, vi)
+    produce(logpdf(dist, value))
+    return 0
+end
+
+####
+#### Resampling schemes for particle filters
+####
+
+# Some references
+# - http://arxiv.org/pdf/1301.4019.pdf
+# - http://people.isy.liu.se/rt/schon/Publications/HolSG2006.pdf
+# Code adapted from: http://uk.mathworks.com/matlabcentral/fileexchange/24968-resampling-methods-for-particle-filtering
+
+# Default resampling scheme
+function resample(w::AbstractVector{<:Real}, num_particles::Integer=length(w))
+    return resample_systematic(w, num_particles)
+end
+
+# More stable, faster version of rand(Categorical)
+function randcat(p::AbstractVector{<:Real})
+    T = eltype(p)
+    r = rand(T)
+    s = 1
+    for j in eachindex(p)
+        r -= p[j]
+        if r <= zero(T)
+            s = j
+            break
+        end
+    end
+    return s
+end
+
+function resample_multinomial(w::AbstractVector{<:Real}, num_particles::Integer)
+    return rand(Distributions.sampler(Categorical(w)), num_particles)
+end
+
+function resample_residual(w::AbstractVector{<:Real}, num_particles::Integer)
+    # Pre-allocate array for resampled particles
+    indices = Vector{Int}(undef, num_particles)
+
+    # deterministic assignment
+    residuals = similar(w)
+    i = 1
+    @inbounds for j in 1:length(w)
+        x = num_particles * w[j]
+        floor_x = floor(Int, x)
+        for k in 1:floor_x
+            indices[i] = j
+            i += 1
+        end
+        residuals[j] = x - floor_x
+    end
+
+    # sampling from residuals
+    if i <= num_particles
+        residuals ./= sum(residuals)
+        rand!(Categorical(residuals), view(indices, i:num_particles))
+    end
+
+    return indices
+end
+
+
+"""
+    resample_stratified(weights, n)
+
+Return a vector of `n` samples `x₁`, ..., `xₙ` from the numbers 1, ..., `length(weights)`,
+generated by stratified resampling.
+
+In stratified resampling `n` ordered random numbers `u₁`, ..., `uₙ` are generated, where
+``uₖ \\sim U[(k - 1) / n, k / n)``. Based on these numbers the samples `x₁`, ..., `xₙ`
+are selected according to the multinomial distribution defined by the normalized `weights`,
+i.e., `xᵢ = j` if and only if
+``uᵢ \\in [\\sum_{s=1}^{j-1} weights_{s}, \\sum_{s=1}^{j} weights_{s})``.
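+
+For example, with `weights = [0.3, 0.4, 0.3]` and large `n`, the result contains
+approximately `0.3n`, `0.4n`, and `0.3n` copies of the indices 1, 2, and 3,
+respectively, with lower variance than multinomial resampling.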
+""" +function resample_stratified(weights::AbstractVector{<:Real}, n::Integer) + # check input + m = length(weights) + m > 0 || error("weight vector is empty") + + # pre-calculations + @inbounds v = n * weights[1] + + # generate all samples + samples = Array{Int}(undef, n) + sample = 1 + @inbounds for i in 1:n + # sample next `u` (scaled by `n`) + u = oftype(v, i - 1 + rand()) + + # as long as we have not found the next sample + while v < u + # increase and check the sample + sample += 1 + sample > m && + error("sample could not be selected (are the weights normalized?)") + + # update the cumulative sum of weights (scaled by `n`) + v += n * weights[sample] + end + + # save the next sample + samples[i] = sample + end + + return samples +end + +""" + resample_systematic(weights, n) + +Return a vector of `n` samples `x₁`, ..., `xₙ` from the numbers 1, ..., `length(weights)`, +generated by systematic resampling. + +In systematic resampling a random number ``u \\sim U[0, 1)`` is used to generate `n` ordered +numbers `u₁`, ..., `uₙ` where ``uₖ = (u + k − 1) / n``. Based on these numbers the samples +`x₁`, ..., `xₙ` are selected according to the multinomial distribution defined by the +normalized `weights`, i.e., `xᵢ = j` if and only if +``uᵢ \\in [\\sum_{s=1}^{j-1} weights_{s}, \\sum_{s=1}^{j} weights_{s})``. +""" +function resample_systematic(weights::AbstractVector{<:Real}, n::Integer) + # check input + m = length(weights) + m > 0 || error("weight vector is empty") + + # pre-calculations + @inbounds v = n * weights[1] + u = oftype(v, rand()) + + # find all samples + samples = Array{Int}(undef, n) + sample = 1 + @inbounds for i in 1:n + # as long as we have not found the next sample + while v < u + # increase and check the sample + sample += 1 + sample > m && + error("sample could not be selected (are the weights normalized?)") + + # update the cumulative sum of weights (scaled by `n`) + v += n * weights[sample] + end + + # save the next sample + samples[i] = sample + + # update `u` + u += one(u) + end + + return samples +end diff --git a/Turing/test/core/container.jl b/Turing/test/core/container.jl new file mode 100644 index 00000000..6731e1e0 --- /dev/null +++ b/Turing/test/core/container.jl @@ -0,0 +1,127 @@ +using Turing, Random +using Turing: ParticleContainer, getweights, getweight, + effectiveSampleSize, Trace, current_trace, VarName, VarInfo, + Sampler, consume, produce, fork, getlogp +using Turing.Core: logZ, reset_logweights!, increase_logweight!, + resample_propagate!, reweight! +using Test + +dir = splitdir(splitdir(pathof(Turing))[1])[1] +include(dir*"/test/test_utils/AllUtils.jl") + +@testset "container.jl" begin + @turing_testset "copy particle container" begin + pc = ParticleContainer(Trace[]) + newpc = copy(pc) + + @test newpc.logWs == pc.logWs + @test typeof(pc) === typeof(newpc) + end + + @turing_testset "particle container" begin + # Create a resumable function that always yields `logp`. + function fpc(logp) + f = let logp = logp + () -> begin + while true + produce(logp) + end + end + end + return f + end + + # Dummy sampler that is not actually used. + sampler = Sampler(PG(5), empty_model()) + + # Create particle container. + logps = [0.0, -1.0, -2.0] + particles = [Trace(fpc(logp), empty_model(), sampler, VarInfo()) for logp in logps] + pc = ParticleContainer(particles) + + # Initial state. 
+ @test all(iszero(getlogp(particle.vi)) for particle in pc.vals) + @test pc.logWs == zeros(3) + @test getweights(pc) == fill(1/3, 3) + @test all(getweight(pc, i) == 1/3 for i in 1:3) + @test logZ(pc) ≈ log(3) + @test effectiveSampleSize(pc) == 3 + + # Reweight particles. + reweight!(pc) + @test all(iszero(getlogp(particle.vi)) for particle in pc.vals) + @test pc.logWs == logps + @test getweights(pc) ≈ exp.(logps) ./ sum(exp, logps) + @test all(getweight(pc, i) ≈ exp(logps[i]) / sum(exp, logps) for i in 1:3) + @test logZ(pc) ≈ log(sum(exp, logps)) + + # Reweight particles. + reweight!(pc) + @test all(iszero(getlogp(particle.vi)) for particle in pc.vals) + @test pc.logWs == 2 .* logps + @test getweights(pc) == exp.(2 .* logps) ./ sum(exp, 2 .* logps) + @test all(getweight(pc, i) ≈ exp(2 * logps[i]) / sum(exp, 2 .* logps) for i in 1:3) + @test logZ(pc) ≈ log(sum(exp, 2 .* logps)) + + # Resample and propagate particles. + resample_propagate!(pc) + @test all(iszero(getlogp(particle.vi)) for particle in pc.vals) + @test pc.logWs == zeros(3) + @test getweights(pc) == fill(1/3, 3) + @test all(getweight(pc, i) == 1/3 for i in 1:3) + @test logZ(pc) ≈ log(3) + @test effectiveSampleSize(pc) == 3 + + # Reweight particles. + reweight!(pc) + @test all(iszero(getlogp(particle.vi)) for particle in pc.vals) + @test pc.logWs ⊆ logps + @test getweights(pc) == exp.(pc.logWs) ./ sum(exp, pc.logWs) + @test all(getweight(pc, i) ≈ exp(pc.logWs[i]) / sum(exp, pc.logWs) for i in 1:3) + @test logZ(pc) ≈ log(sum(exp, pc.logWs)) + + # Increase unnormalized logarithmic weights. + logws = copy(pc.logWs) + increase_logweight!(pc, 2, 1.41) + @test pc.logWs == logws + [0, 1.41, 0] + + # Reset unnormalized logarithmic weights. + logws = pc.logWs + reset_logweights!(pc) + @test pc.logWs === logws + @test all(iszero, pc.logWs) + end + + @turing_testset "trace" begin + n = Ref(0) + + alg = PG(5) + spl = Sampler(alg, empty_model()) + dist = Normal(0, 1) + function f2() + t = TArray(Int, 1); + t[1] = 0; + while true + ct = current_trace() + vn = @varname x[n] + Turing.assume(Random.GLOBAL_RNG, spl, dist, vn, ct.vi) + n[] += 1 + produce(t[1]) + vn = @varname x[n] + Turing.assume(Random.GLOBAL_RNG, spl, dist, vn, ct.vi) + n[] += 1 + t[1] = 1 + t[1] + end + end + + # Test task copy version of trace + tr = Trace(f2, empty_model(), spl, VarInfo()) + + consume(tr); consume(tr) + a = fork(tr); + consume(a); consume(a) + + @test consume(tr) == 2 + @test consume(a) == 4 + end +end diff --git a/Turing/test/inference/AdvancedSMC.jl b/Turing/test/inference/AdvancedSMC.jl new file mode 100644 index 00000000..a987cc81 --- /dev/null +++ b/Turing/test/inference/AdvancedSMC.jl @@ -0,0 +1,253 @@ +using Turing, Random, Test +using Turing.Core: ResampleWithESSThreshold +using Turing.Inference: getspace, resample_systematic, resample_multinomial + +using Random + +dir = splitdir(splitdir(pathof(Turing))[1])[1] +include(dir*"/test/test_utils/AllUtils.jl") + +@testset "SMC" begin + @turing_testset "constructor" begin + s = SMC() + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === () + + s = SMC(:x) + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x,) + + s = SMC((:x,)) + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x,) + + s = SMC(:x, :y) + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x, :y) + + s = SMC((:x, :y)) + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x, :y) + + s = SMC(0.6) + @test s.resampler === 
ResampleWithESSThreshold(resample_systematic, 0.6) + @test getspace(s) === () + + s = SMC(0.6, (:x,)) + @test s.resampler === ResampleWithESSThreshold(resample_systematic, 0.6) + @test getspace(s) === (:x,) + + s = SMC(resample_multinomial, 0.6) + @test s.resampler === ResampleWithESSThreshold(resample_multinomial, 0.6) + @test getspace(s) === () + + s = SMC(resample_multinomial, 0.6, (:x,)) + @test s.resampler === ResampleWithESSThreshold(resample_multinomial, 0.6) + @test getspace(s) === (:x,) + + s = SMC(resample_systematic) + @test s.resampler === resample_systematic + @test getspace(s) === () + + s = SMC(resample_systematic, (:x,)) + @test s.resampler === resample_systematic + @test getspace(s) === (:x,) + end + + @turing_testset "models" begin + @model function normal() + a ~ Normal(4,5) + 3 ~ Normal(a,2) + b ~ Normal(a,1) + 1.5 ~ Normal(b,2) + a, b + end + + tested = sample(normal(), SMC(), 100); + + # failing test + @model function fail_smc() + a ~ Normal(4,5) + 3 ~ Normal(a,2) + b ~ Normal(a,1) + if a >= 4.0 + 1.5 ~ Normal(b,2) + end + a, b + end + + @test_throws ErrorException sample(fail_smc(), SMC(), 100) + end + + @turing_testset "logevidence" begin + Random.seed!(100) + + @model function test() + a ~ Normal(0, 1) + x ~ Bernoulli(1) + b ~ Gamma(2, 3) + 1 ~ Bernoulli(x / 2) + c ~ Beta() + 0 ~ Bernoulli(x / 2) + x + end + + chains_smc = sample(test(), SMC(), 100) + + @test all(isone, chains_smc[:x]) + @test chains_smc.logevidence ≈ -2 * log(2) + end +end + +@testset "PG" begin + @turing_testset "constructor" begin + s = PG(10) + @test s.nparticles == 10 + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === () + + s = PG(20, :x) + @test s.nparticles == 20 + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x,) + + s = PG(30, (:x,)) + @test s.nparticles == 30 + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x,) + + s = PG(40, :x, :y) + @test s.nparticles == 40 + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x, :y) + + s = PG(50, (:x, :y)) + @test s.nparticles == 50 + @test s.resampler == ResampleWithESSThreshold() + @test getspace(s) === (:x, :y) + + s = PG(60, 0.6) + @test s.nparticles == 60 + @test s.resampler === ResampleWithESSThreshold(resample_systematic, 0.6) + @test getspace(s) === () + + s = PG(70, 0.6, (:x,)) + @test s.nparticles == 70 + @test s.resampler === ResampleWithESSThreshold(resample_systematic, 0.6) + @test getspace(s) === (:x,) + + s = PG(80, resample_multinomial, 0.6) + @test s.nparticles == 80 + @test s.resampler === ResampleWithESSThreshold(resample_multinomial, 0.6) + @test getspace(s) === () + + s = PG(90, resample_multinomial, 0.6, (:x,)) + @test s.nparticles == 90 + @test s.resampler === ResampleWithESSThreshold(resample_multinomial, 0.6) + @test getspace(s) === (:x,) + + s = PG(100, resample_systematic) + @test s.nparticles == 100 + @test s.resampler === resample_systematic + @test getspace(s) === () + + s = PG(110, resample_systematic, (:x,)) + @test s.nparticles == 110 + @test s.resampler === resample_systematic + @test getspace(s) === (:x,) + end + + @turing_testset "logevidence" begin + Random.seed!(100) + + @model function test() + a ~ Normal(0, 1) + x ~ Bernoulli(1) + b ~ Gamma(2, 3) + 1 ~ Bernoulli(x / 2) + c ~ Beta() + 0 ~ Bernoulli(x / 2) + x + end + + chains_pg = sample(test(), PG(10), 100) + + @test all(isone, chains_pg[:x]) + @test chains_pg.logevidence ≈ -2 * log(2) atol = 0.01 + end +end + +# @testset "pmmh.jl" begin +# @turing_testset "pmmh 
constructor" begin +# N = 2000 +# s1 = PMMH(N, SMC(10, :s), MH(1,(:m, s -> Normal(s, sqrt(1))))) +# s2 = PMMH(N, SMC(10, :s), MH(1, :m)) +# s3 = PIMH(N, SMC()) +# +# c1 = sample(gdemo_default, s1) +# c2 = sample(gdemo_default, s2) +# c3 = sample(gdemo_default, s3) +# end +# @numerical_testset "pmmh inference" begin +# alg = PMMH(2000, SMC(20, :m), MH(1, (:s, GKernel(1)))) +# chain = sample(gdemo_default, alg) +# check_gdemo(chain, atol = 0.1) +# +# # PMMH with prior as proposal +# alg = PMMH(2000, SMC(20, :m), MH(1, :s)) +# chain = sample(gdemo_default, alg) +# check_gdemo(chain, atol = 0.1) +# +# # PIMH +# alg = PIMH(2000, SMC()) +# chain = sample(gdemo_default, alg) +# check_gdemo(chain) +# +# # MoGtest +# pmmh = PMMH(2000, +# SMC(10, :z1, :z2, :z3, :z4), +# MH(1, :mu1, :mu2)) +# chain = sample(MoGtest_default, pmmh) +# +# check_MoGtest_default(chain, atol = 0.1) +# end +# end + +# @testset "ipmcmc.jl" begin +# @turing_testset "ipmcmc constructor" begin +# Random.seed!(125) +# +# N = 50 +# s1 = IPMCMC(10, N, 4, 2) +# s2 = IPMCMC(10, N, 4) +# +# c1 = sample(gdemo_default, s1) +# c2 = sample(gdemo_default, s2) +# end +# @numerical_testset "ipmcmc inference" begin +# alg = IPMCMC(30, 500, 4) +# chain = sample(gdemo_default, alg) +# check_gdemo(chain) +# +# alg2 = IPMCMC(15, 100, 10) +# chain2 = sample(MoGtest_default, alg2) +# check_MoGtest_default(chain2, atol = 0.2) +# end +# end + +@turing_testset "resample.jl" begin + D = [0.3, 0.4, 0.3] + num_samples = Int(1e6) + resSystematic = Turing.Inference.resample_systematic(D, num_samples ) + resStratified = Turing.Inference.resample_stratified(D, num_samples ) + resMultinomial= Turing.Inference.resample_multinomial(D, num_samples ) + resResidual = Turing.Inference.resample_residual(D, num_samples ) + Turing.Inference.resample(D) + resSystematic2=Turing.Inference.resample(D, num_samples ) + + @test sum(resSystematic .== 2) ≈ (num_samples * 0.4) atol=1e-3*num_samples + @test sum(resSystematic2 .== 2) ≈ (num_samples * 0.4) atol=1e-3*num_samples + @test sum(resStratified .== 2) ≈ (num_samples * 0.4) atol=1e-3*num_samples + @test sum(resMultinomial .== 2) ≈ (num_samples * 0.4) atol=1e-2*num_samples + @test sum(resResidual .== 2) ≈ (num_samples * 0.4) atol=1e-2*num_samples +end