diff --git a/Project.toml b/Project.toml
index a9a8d81..520bcae 100644
--- a/Project.toml
+++ b/Project.toml
@@ -31,7 +31,7 @@ KernelDensity = "0.6.4"
 LinearAlgebra = "1.10"
 OrdinaryDiffEq = "6.62"
 Parameters = "0.12"
-QuasiMonteCarlo = "0.2.3, 0.3"
+QuasiMonteCarlo = "0.3.3"
 Random = "1.10"
 RecursiveArrayTools = "3.2"
 SafeTestsets = "0.1"
diff --git a/docs/src/tutorials/juliacon21.md b/docs/src/tutorials/juliacon21.md
index a91100d..6923f88 100644
--- a/docs/src/tutorials/juliacon21.md
+++ b/docs/src/tutorials/juliacon21.md
@@ -5,7 +5,7 @@ We showcase how to use multiple GSA methods, analyze their results and leverage
 perform Global Sensitivity analysis at scale.
 
 ```@example lv
-using GlobalSensitivity, QuasiMonteCarlo, OrdinaryDiffEq, Statistics, CairoMakie
+using GlobalSensitivity, QuasiMonteCarlo, OrdinaryDiffEq, Statistics, CairoMakie, Random
 
 function f(du, u, p, t)
     du[1] = p[1] * u[1] - p[2] * u[1] * u[2] #prey
@@ -54,7 +54,7 @@ fig
 ```
 
 ```@example lv
-sobol_sens = gsa(f1, Sobol(), bounds, samples = 500)
+sobol_sens = gsa(f1, Sobol(), bounds, samples = 512)
 efast_sens = gsa(f1, eFAST(), bounds, samples = 500)
 ```
 
@@ -94,10 +94,10 @@ fig
 
 ```@example lv
 using QuasiMonteCarlo
-samples = 500
+samples = 512
 lb = [1.0, 1.0, 1.0, 1.0]
 ub = [5.0, 5.0, 5.0, 5.0]
-sampler = SobolSample()
+sampler = SobolSample(; R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = 9, rng = _rng))
 A, B = QuasiMonteCarlo.generate_design_matrices(samples, lb, ub, sampler)
 
 sobol_sens_desmat = gsa(f1, Sobol(), A, B)
@@ -129,7 +129,7 @@ f1 = function (p)
     prob1 = remake(prob; p = p)
     sol = solve(prob1, Tsit5(); saveat = t)
 end
-sobol_sens = gsa(f1, Sobol(nboot = 20), bounds, samples = 500)
+sobol_sens = gsa(f1, Sobol(nboot = 20), bounds, samples = 512)
 fig = Figure(resolution = (600, 400))
 ax, hm = CairoMakie.scatter(
     fig[1, 1], sobol_sens.S1[1][1, 2:end], label = "Prey", markersize = 4)
diff --git a/docs/src/tutorials/parallelized_gsa.md b/docs/src/tutorials/parallelized_gsa.md
index 64db803..5b83ffc 100644
--- a/docs/src/tutorials/parallelized_gsa.md
+++ b/docs/src/tutorials/parallelized_gsa.md
@@ -65,7 +65,7 @@ scatter(
 For the Sobol method, we can similarly do:
 
 ```@example ode
-m = gsa(f1, Sobol(), [[1, 5], [1, 5], [1, 5], [1, 5]], samples = 1000)
+m = gsa(f1, Sobol(), [[1, 5], [1, 5], [1, 5], [1, 5]], samples = 1024)
 ```
 
 ## Direct Use of Design Matrices
@@ -76,10 +76,10 @@ we use [QuasiMonteCarlo.jl](https://docs.sciml.ai/QuasiMonteCarlo/stable/) to ge
 as follows:
 
 ```@example ode
-samples = 500
+samples = 512
 lb = [1.0, 1.0, 1.0, 1.0]
 ub = [5.0, 5.0, 5.0, 5.0]
-sampler = SobolSample()
+sampler = SobolSample(; R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = 9, rng = _rng))
 A, B = QuasiMonteCarlo.generate_design_matrices(samples, lb, ub, sampler)
 ```
 
diff --git a/docs/src/tutorials/shapley.md b/docs/src/tutorials/shapley.md
index d793d10..d0afa95 100644
--- a/docs/src/tutorials/shapley.md
+++ b/docs/src/tutorials/shapley.md
@@ -128,8 +128,8 @@ barplot(
         xticklabelrotation = 1,
         xticks = (1:54, ["θ$i" for i in 1:54]),
         ylabel = "Shapley Indices",
-        limits = (nothing, (0.0, 0.2)),
-    ),
+        limits = (nothing, (0.0, 0.2))
+    )
 )
 ```
 
@@ -160,7 +160,7 @@ barplot(
         xticklabelrotation = 1,
         xticks = (1:54, ["θ$i" for i in 1:54]),
         ylabel = "Shapley Indices",
-        limits = (nothing, (0.0, 0.2)),
-    ),
+        limits = (nothing, (0.0, 0.2))
+    )
 )
 ```
diff --git a/src/sobol_sensitivity.jl b/src/sobol_sensitivity.jl
index 88631a1..6fec58d 100644
--- a/src/sobol_sensitivity.jl
+++ b/src/sobol_sensitivity.jl
@@ -46,10 +46,14 @@ by dividing other
 terms in the variance decomposition by `` Var(Y) ``.
 
 - `:Jansen1999` - [M.J.W. Jansen, 1999, Analysis of variance designs for model output, Computer Physics Communication, 117, 35–43.](https://www.sciencedirect.com/science/article/abs/pii/S0010465598001544)
 - `:Janon2014` - [Janon, A., Klein, T., Lagnoux, A., Nodet, M., & Prieur, C. (2014). Asymptotic normality and efficiency of two Sobol index estimators. ESAIM: Probability and Statistics, 18, 342-364.](https://arxiv.org/abs/1303.6451)
 
+!!! note
+
+    Sobol sampling should be done with $2^k$ points and randomization; take a look at the docs for [QuasiMonteCarlo](https://docs.sciml.ai/QuasiMonteCarlo/stable/randomization/). If the number of samples is not a power of 2, the number of sample points will be changed to the next power of 2.
+
 ### Example
 
 ```julia
-using GlobalSensitivity, QuasiMonteCarlo
+using GlobalSensitivity, QuasiMonteCarlo, Random
 
 function ishi(X)
     A= 7
@@ -57,10 +61,10 @@ function ishi(X)
     sin(X[1]) + A*sin(X[2])^2+ B*X[3]^4 *sin(X[1])
 end
 
-samples = 600000
+samples = 524288
 lb = -ones(4)*π
 ub = ones(4)*π
-sampler = SobolSample()
+sampler = SobolSample(; R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = 19, rng = Random.default_rng()))
 A,B = QuasiMonteCarlo.generate_design_matrices(samples,lb,ub,sampler)
 
 res1 = gsa(ishi,Sobol(order=[0,1,2]),A,B)
@@ -342,10 +346,18 @@ function gsa_sobol_all_y_analysis(method, all_y::AbstractArray{T}, d, n, Ei_esti
         nboot > 1 ? reshape(ST_CI, size_...) : nothing)
 end
 
-function gsa(f, method::Sobol, p_range::AbstractVector; samples, kwargs...)
+function gsa(f, method::Sobol, p_range::AbstractVector; samples,
+        rng::AbstractRNG = Random.default_rng(), kwargs...)
+    samples2n = nextpow(2, samples)
+    if samples2n != samples
+        samples = samples2n
+        @warn "Passed samples is not a power of 2, number of sample points changed to $samples"
+    end
+    log2num = round(Int, log2(samples))
     AB = QuasiMonteCarlo.generate_design_matrices(samples, [i[1] for i in p_range],
         [i[2] for i in p_range],
-        QuasiMonteCarlo.SobolSample(),
+        QuasiMonteCarlo.SobolSample(;
+            R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = log2num, rng)),
         2 * method.nboot)
     A = reduce(hcat, @view(AB[1:(method.nboot)]))
     B = reduce(hcat, @view(AB[(method.nboot + 1):end]))
diff --git a/test/sobol_method.jl b/test/sobol_method.jl
index 764c594..37a5062 100644
--- a/test/sobol_method.jl
+++ b/test/sobol_method.jl
@@ -1,4 +1,4 @@
-using GlobalSensitivity, QuasiMonteCarlo, Test, OrdinaryDiffEq
+using GlobalSensitivity, QuasiMonteCarlo, Test, OrdinaryDiffEq, Random
 
 function ishi_batch(X)
     A = 7
@@ -22,10 +22,10 @@ function linear(X)
     A * X[1] + B * X[2]
 end
 
-n = 600000
+n = 524288
 lb = -ones(4) * π
 ub = ones(4) * π
-sampler = SobolSample()
+sampler = SobolSample(; R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = 19, rng = Random.default_rng()))
 A, B = QuasiMonteCarlo.generate_design_matrices(n, lb, ub, sampler)
 
 res1 = gsa(ishi, Sobol(order = [0, 1, 2]), A, B)
@@ -48,10 +48,7 @@ res1 = gsa(ishi, Sobol(order = [0, 1, 2], nboot = 20), A, B)
                       0.0 0.0 4.279200031668718e-5 1.2542212940962112e-5;
                       0.0 0.0 0.0 -7.998213172266514e-7; 0.0 0.0 0.0 0.0] atol=1e-4
 @test res1.S1_Conf_Int≈[
-    0.00013100970128286063,
-    0.00014730548523359544,
-    7.398816006175431e-5,
-    0.0
+    0.00018057916212640867, 0.0002327582551601999, 9.116874912775071e-5, 0.0
 ] atol=1e-4
 @test res1.ST_Conf_Int≈[
     5.657364947147881e-5,
@@ -94,7 +91,7 @@ ishigami.fun <- function(X) {
   B <- 0.1
   A * X[, 1] + B * X[, 2]
 }
-n <- 6000000
+n <- 524288
 X1 <- data.frame(matrix(runif(4 * n,-pi,pi), nrow = n))
 X2 <- data.frame(matrix(runif(4 * n,-pi,pi), nrow = n))
 sobol2007(ishigami.fun, X1, X2)
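For reference, here is a minimal usage sketch of the behaviour this patch introduces; it is not part of the diff. It only exercises names that appear in the hunks above: the new `rng` keyword and power-of-2 rounding in `gsa(f, method::Sobol, p_range; samples, rng)`, and the Owen-scrambled `SobolSample` construction used in the updated docstring, tutorials, and tests. Variable names such as `bounds`, `res_bounds`, and `res_desmat` are illustrative only.

```julia
# Sketch only, not part of the patch: exercising the changed API surface.
using GlobalSensitivity, QuasiMonteCarlo, Random

# Ishigami-style test function, as in the docstring example above.
function ishi(X)
    A = 7
    B = 0.1
    sin(X[1]) + A * sin(X[2])^2 + B * X[3]^4 * sin(X[1])
end

# Bounds route: `rng` is forwarded to the Owen scrambling, and a sample count
# that is not a power of 2 (here 500) is rounded up to 512 with a warning.
bounds = [[-π, π] for _ in 1:4]
res_bounds = gsa(ishi, Sobol(order = [0, 1, 2]), bounds;
    samples = 500, rng = Random.default_rng())

# Design-matrix route: build the scrambled Sobol matrices explicitly, mirroring
# the updated tutorials and tests (2^9 = 512 points, hence pad = 9).
samples = 512
lb = -ones(4) * π
ub = ones(4) * π
sampler = SobolSample(;
    R = QuasiMonteCarlo.OwenScramble(; base = 2, pad = 9, rng = Random.default_rng()))
A, B = QuasiMonteCarlo.generate_design_matrices(samples, lb, ub, sampler)
res_desmat = gsa(ishi, Sobol(order = [0, 1, 2]), A, B)
```

Both routes return the result object whose `S1`/`ST` fields are accessed in the tutorials above.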