diff --git a/src/pem.jl b/src/pem.jl
index b1b8762..6e34461 100644
--- a/src/pem.jl
+++ b/src/pem.jl
@@ -67,7 +67,7 @@ represented by the vector of elements d.
 
 Parameters
 ----------
-- d :: Vector
+- d :: Vector{<:Real}
     Distribution under interest
 - N :: Integer
     Number of desired estimate points
@@ -88,7 +88,7 @@ Returns
 
 """
 function pem(
-    d::Vector,
+    d::Vector{<:Real},
     N::Integer;
     mean_fun::Function=Distributions.mean,
     central_moment_fun::Function=Distributions.moment,
@@ -202,5 +202,99 @@ function pem(
     x = ϵ .+ mean_value
 
     # results = NamedTuple{(:x, :p)}.(zip(x, p))
-
-
+    return (x=x, p=p)
+end
+
+
+"""
+    pem(distributions, N; mean_fun=mean, central_moment_fun=moment, optimizer=HiGHS.Optimizer)
+
+Point Estimate Method to identify estimate points for K independent univariate distributions.
+
+For K independent distributions each with N concentrations, the multivariate representation
+uses K*N total concentration points. Each concentration point for variable k at concentration n
+has all other variables set to their means:
+
+    x_{k,n} = (μ_1, ..., μ_{k-1}, x_{k,n}, μ_{k+1}, ..., μ_K)
+
+with weight p_{k,n} / K, where p_{k,n} is the univariate weight for distribution k at
+concentration n. This ensures all weights sum to 1.
+
+This function implements the multivariate extension described in:
+- H.P. Hong, An efficient point estimate method for probabilistic analysis,
+  Reliability Engineering and System Safety, 1998, https://doi.org/10.1016/S0951-8320(97)00071-9
+
+Parameters
+----------
+- distributions :: Vector{<:UnivariateDistribution}
+    Vector of K independent univariate distributions
+- N :: Union{Integer, Vector{<:Integer}}
+    Number of desired estimate points per distribution. If an Integer is provided,
+    the same N is used for all distributions. If a Vector is provided, N[k] is
+    used for the k-th distribution.
+- mean_fun :: Function (optional)
+    Function used to calculate the mean value of each distribution
+- central_moment_fun :: Function (optional)
+    Function used to calculate the central moment of each distribution
+- montecarlo_sampling :: Integer (optional, default 1000000)
+    Number of Monte Carlo samples used if a non-specific moment function is available
+- optimizer (optional)
+    JuMP optimizer for executing the optimization
+
+Returns
+-------
+- (x, p) :: NamedTuple
+    - x :: Matrix{Float64}
+        Matrix of shape (K, M) where M = sum(N_1, ..., N_K). Each column is one
+        concentration point: only one variable deviates from its mean.
+    - p :: Vector{Float64}
+        Weight of each concentration point (p_{k,n} / K).
+
+"""
+function pem(
+    distributions::Vector{<:UnivariateDistribution},
+    N::Union{Integer, Vector{<:Integer}};
+    mean_fun::Function=Distributions.mean,
+    central_moment_fun::Function=Distributions.moment,
+    montecarlo_sampling::Integer=1000000,
+    optimizer=DEFAULT_SOLVER,
+    )
+
+    K = length(distributions)
+    Ns = N isa Integer ? fill(N, K) : N
+
+    @assert length(Ns) == K "Length of N vector must match the number of distributions (got $(length(Ns)), expected $K)"
+
+    # Apply univariate pem to each distribution independently
+    results = [
+        pem(distributions[k], Ns[k];
+            mean_fun=mean_fun,
+            central_moment_fun=central_moment_fun,
+            montecarlo_sampling=montecarlo_sampling,
+            optimizer=optimizer)
+        for k in 1:K
+    ]
+
+    # Mean of each distribution
+    μ = [mean_fun(distributions[k]) for k in 1:K]
+
+    # Build K*N concentration points following Hong (1998):
+    # For variable k at concentration n, all other variables stay at their mean.
+    M = sum(Ns)
+    x = Matrix{Float64}(undef, K, M)
+    p = Vector{Float64}(undef, M)
+
+    j = 1
+    for k in 1:K
+        for n in 1:Ns[k]
+            # All variables at their mean, except variable k
+            x[:, j] .= μ
+            x[k, j] = results[k].x[n]
+            # Weight: p_{k,n} / K
+            p[j] = results[k].p[n] / K
+            j += 1
+        end
+    end
+
+    return (x=x, p=p)
+end
\ No newline at end of file
diff --git a/test/Examples.jl b/test/Examples.jl
index 07a2839..e14dec9 100644
--- a/test/Examples.jl
+++ b/test/Examples.jl
@@ -15,4 +15,25 @@ module Examples
         Example("Normal_truncated_9", truncated(Normal(1.0, 0.4), 0.0, +Inf), 9),
     ]
 
+    struct MultivariateExample
+        name # name of test
+        d # Vector of distributions
+        N # Number of estimate points (Integer or Vector)
+    end
+
+    list_multivariate_examples = [
+        MultivariateExample("Normal2_N2", [Normal(), Normal()], 2),
+        MultivariateExample("Normal2_N3", [Normal(), Normal()], 3),
+        MultivariateExample(
+            "Normal_TruncNormal_N2",
+            [Normal(), truncated(Normal(1.0, 0.4), 0.0, +Inf)],
+            2,
+        ),
+        MultivariateExample(
+            "Normal_TruncNormal_mixed_N",
+            [Normal(), truncated(Normal(1.0, 0.4), 0.0, +Inf)],
+            [2, 3],
+        ),
+    ]
+
 end
\ No newline at end of file
diff --git a/test/runtests.jl b/test/runtests.jl
index b56922c..91e704a 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -50,12 +50,59 @@ end
 
 
 
+"Function to test multivariate examples"
+function test_multivariate_example(example_name, testing_function, args...)
+
+    # for reproducibility
+    Distributions.Random.seed!(SEED)
+
+    # calculate simulations
+    calc_solution = testing_function(args...)
+
+    path_solution = joinpath(BASE_FOLDER, "test", "testcases", string(testing_function) * "_multivariate", example_name * ".yml")
+
+    # Sort columns lexicographically for deterministic comparison
+    I_calc = sortperm(eachcol(calc_solution.x), by=col -> Tuple(col))
+
+    x_sorted = calc_solution.x[:, I_calc]
+    p_sorted = calc_solution.p[I_calc]
+
+    if isfile(path_solution)
+        # if the file exists run tests
+        proven_solution = YAML.load_file(path_solution)
+
+        proven_x = Matrix(hcat([Float64.(row) for row in proven_solution["x"]]...)')
+        proven_p = Float64.(proven_solution["p"])
+
+        @test size(x_sorted) == size(proven_x)
+        @test vector_approx_test(vec(x_sorted), vec(proven_x))
+        @test vector_approx_test(p_sorted, proven_p)
+    else
+        # otherwise create the tests
+        mkpath(dirname(path_solution))
+
+        dict_calc_solution = Dict(
+            "x" => [x_sorted[k, :] for k in 1:size(x_sorted, 1)],
+            "p" => p_sorted,
+        )
+
+        YAML.write_file(path_solution, dict_calc_solution)
+        @warn("Preloaded solution not found, then it has been created\nPath: $path_solution")
+    end
+
+end
+
 list_examples = Examples.list_examples
+list_multivariate_examples = Examples.list_multivariate_examples
 
 @testset "tests" begin
     for test in list_examples
         test_example(test.name, pem, test.d, test.N)
     end
+
+    for test in list_multivariate_examples
+        test_multivariate_example(test.name, pem, test.d, test.N)
+    end
 end
 
 
diff --git a/test/testcases/pem_multivariate/Normal2_N2.yml b/test/testcases/pem_multivariate/Normal2_N2.yml
new file mode 100644
index 0000000..f1a6360
--- /dev/null
+++ b/test/testcases/pem_multivariate/Normal2_N2.yml
@@ -0,0 +1,16 @@
+x:
+  -
+    - -1.0
+    - 0.0
+    - 0.0
+    - 1.0
+  -
+    - 0.0
+    - -1.0
+    - 1.0
+    - 0.0
+p:
+  - 0.25
+  - 0.25
+  - 0.25
+  - 0.25
diff --git a/test/testcases/pem_multivariate/Normal2_N3.yml b/test/testcases/pem_multivariate/Normal2_N3.yml
new file mode 100644
index 0000000..2c31f90
--- /dev/null
+++ b/test/testcases/pem_multivariate/Normal2_N3.yml
@@ -0,0 +1,22 @@
+x:
+  -
+    - -1.7320508075688776
+    - 0.0
+    - 0.0
+    - 0.0
+    - 0.0
+    - 1.7320508075688772
+  -
+    - 0.0
+    - -1.7320508075688776
+    - 0.0
+    - 0.0
+    - 1.7320508075688772
+    - 0.0
+p:
+  - 0.0833333333333333
+  - 0.0833333333333333
+  - 0.33333333333333337
+  - 0.33333333333333337
+  - 0.08333333333333331
+  - 0.08333333333333331
diff --git a/test/testcases/pem_multivariate/Normal_TruncNormal_N2.yml b/test/testcases/pem_multivariate/Normal_TruncNormal_N2.yml
new file mode 100644
index 0000000..60cf308
--- /dev/null
+++ b/test/testcases/pem_multivariate/Normal_TruncNormal_N2.yml
@@ -0,0 +1,16 @@
+x:
+  -
+    - -1.0
+    - 0.0
+    - 0.0
+    - 1.0
+  -
+    - 1.0070551301947668
+    - 0.6355198478077098
+    - 1.4184929618467512
+    - 1.0070551301947668
+p:
+  - 0.25
+  - 0.26274342969642217
+  - 0.23725657030357783
+  - 0.25
diff --git a/test/testcases/pem_multivariate/Normal_TruncNormal_mixed_N.yml b/test/testcases/pem_multivariate/Normal_TruncNormal_mixed_N.yml
new file mode 100644
index 0000000..593a5c6
--- /dev/null
+++ b/test/testcases/pem_multivariate/Normal_TruncNormal_mixed_N.yml
@@ -0,0 +1,19 @@
+x:
+  -
+    - -1.0
+    - 0.0
+    - 0.0
+    - 0.0
+    - 1.0
+  -
+    - 1.0070551301947668
+    - 0.4089295792763922
+    - 1.0532985603054232
+    - 1.7295827634987933
+    - 1.0070551301947668
+p:
+  - 0.25
+  - 0.10944862748508999
+  - 0.3204603371606533
+  - 0.07009103535425674
+  - 0.25