Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
b85c97f
Add files via upload
sitoryu Jul 30, 2020
929ae44
new model of Polyharmonic Splines
sitoryu Jul 30, 2020
49dde9c
calculate sobol indices
sitoryu Jul 30, 2020
ebdffc9
Added soindex, PolySpline and its functions
sitoryu Jul 30, 2020
1bb7e73
added test for polysplines
sitoryu Aug 2, 2020
04adcf2
added test for soindex
sitoryu Aug 2, 2020
0de144c
Update runtests.jl
sitoryu Aug 2, 2020
fbc4cb5
updated to use new input of sobolinidces
sitoryu Aug 16, 2020
81def26
using new constructor
sitoryu Aug 16, 2020
36af7fb
new input/constructor
sitoryu Aug 16, 2020
a0a096c
-> outer constructor to inner constructor
sitoryu Aug 16, 2020
dca63c4
reduced input requirements
sitoryu Aug 16, 2020
f5f6914
Rename polyspline.jl to polyharmonicspline.jl
sitoryu Aug 16, 2020
7693b96
no longer exports internal methods
sitoryu Aug 16, 2020
40c1718
Update and rename polyspline.jl to polyharmonicspline.jl
sitoryu Aug 16, 2020
ec0f403
Update and rename soindex.jl to sobolindices.jl
sitoryu Aug 16, 2020
68bb569
Update polyharmonicspline.jl
sitoryu Aug 16, 2020
85c5598
Update runtests.jl
sitoryu Aug 17, 2020
779bbc9
Merge branch 'master' of https://github.com/sitoryu/UncertaintyQuanti…
sitoryu Oct 18, 2021
0d3f917
script
sitoryu Oct 18, 2021
fe3412d
delete old tests
sitoryu Oct 18, 2021
dd40088
Create smc.jl
sitoryu Oct 18, 2021
b57668a
bayes script
sitoryu Oct 18, 2021
3d20469
Revert "Create smc.jl"
sitoryu Oct 18, 2021
7a67a7d
update
sitoryu Oct 20, 2021
9a13078
change examples
sitoryu Oct 20, 2021
a03c792
Update bayesianinference.jl
sitoryu Oct 20, 2021
df7192a
up
sitoryu Nov 1, 2021
08c1315
Merge branch 'master' of https://github.com/sitoryu/UncertaintyQuanti…
sitoryu Nov 1, 2021
92ac6a5
add random package to example
sitoryu Nov 1, 2021
dc394be
Merge branch 'FriesischScott:master' into master
sitoryu Nov 1, 2021
53a2095
CompatHelper: add new compat entry for "StatsBase" at version "0.33"
github-actions[bot] Nov 2, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
{
}
2 changes: 2 additions & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ QuasiMonteCarlo = "8a4e6c94-4038-4cdc-81c3-7e6ffdb2a71b"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

[compat]
Accessors = "0.1"
Expand All @@ -34,4 +35,5 @@ HaltonSequences = "0.1"
Mustache = "1.0"
QuasiMonteCarlo = "0.2"
Reexport = "0.2, 1.0"
StatsBase = "0.33"
julia = "1"
13 changes: 13 additions & 0 deletions demo/bayesianinference/binomialexample.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Bayesian inference demo: estimate the success probability of a binomial
# experiment (85 successes in 100 trials) via Metropolis-Hastings sampling.
using UncertaintyQuantification

# Binomial likelihood p^85 * (1-p)^15 for the observed 85/100 successes;
# x[1] is the candidate success probability.
MHexlikelihood(x) = x[1]^85 * (1-x[1])^(100-85)

# Flat (unnormalized) prior: every candidate value is equally weighted.
prior(x) = 1

# Gaussian random-walk proposal centred at 0 with standard deviation 0.2.
propdist = Normal(0, 0.2)
propdistsample() = [rand(propdist)]
propdistpdf(x) = pdf(propdist, x[1])

# Number of Markov-chain iterations.
mcit=10000

# Run Metropolis-Hastings from starting point [1] with a burn-in of 100.
# NOTE(review): `mh` comes from UncertaintyQuantification — assumed signature
# (likelihood, prior, proposalpdf, proposalsampler, x0, iterations, burnin);
# starting at exactly 1 gives likelihood 0 — confirm the sampler handles this.
rsample = mh(MHexlikelihood, prior, propdistpdf, propdistsample, [1], mcit, 100)
38 changes: 38 additions & 0 deletions demo/bayesianinference/correlationexample.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Bayesian inference demo: infer the correlation coefficient of a bivariate
# normal distribution from simulated data using transitional MCMC (TMCMC).
using UncertaintyQuantification, Random
# Fixed seed so the synthetic data set is reproducible.
Random.seed!(2547);
# Number of observations in the synthetic sample.
N = 200
mu = [0, 0]
# True covariance: unit variances, correlation -0.55.
sig = [1 -0.55; -0.55 1]
mvnormal = MvNormal(mu, sig)


# Draw the N x 2 synthetic data matrix row by row.
sam = zeros(200,2)
for i in 1:200 sam[i,:] = rand(mvnormal) end

# Re-seed from system entropy so the sampler run itself is not deterministic.
Random.seed!()

# Likelihood of the correlation coefficient x[1] given the bivariate normal
# sample `sam` (N observations, zero means, unit variances): the product of
# bivariate standard-normal densities with correlation x[1].
# Returns 0 for candidates outside the admissible range.
function likelihood(x)
    rho = x[1]
    # A correlation outside (-1, 1) has zero likelihood.
    if rho > 1 || rho < -1
        return 0
    end
    onemr2 = 1 - rho^2
    rv = 1
    for k in 1:N
        u = sam[k, 1]
        v = sam[k, 2]
        # Bivariate standard-normal density with correlation rho.
        rv = rv / (2 * pi * sqrt(onemr2)) *
             exp(-1 / (2 * onemr2) * (u^2 - 2 * rho * u * v + v^2))
    end
    return rv
end

# Jeffreys-type prior for a correlation coefficient: proportional to
# (1 - rho^2)^(-3/2) inside (-1, 1), zero outside (unnormalized).
function prior(x)
    rho = x[1]
    if rho > 1 || rho < -1
        return 0
    end
    return 1 / (1 - rho^2)^(3 / 2)
end
# Draw n prior samples uniformly from the valid correlation range (-1, 1).
priorsample(n) = rand(Uniform(-1,1),(n))


# Run TMCMC with 2000 samples per stage; 0.2 is presumably a tolerance /
# scaling parameter of the sampler — TODO confirm against `tmcmc`'s docs.
tmcmcsample = tmcmc(likelihood, prior, priorsample, 2000, 0.2)
54 changes: 54 additions & 0 deletions demo/bayesianinference/eigenvalueexample.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
using UncertaintyQuantification

# Gaussian-type likelihood for the two-parameter eigenvalue problem: compares
# the closed-form eigenvalues of the system, evaluated at parameters
# x = [x1, x2], against 15 noisy measurements of each of the two eigenvalues.
function likelihood(x)
    # 15 measured pairs (first eigenvalue, second eigenvalue).
    measurements = [1.51 0.33; 4.01 0.30; 3.16 0.27; 3.21 0.18; 2.19 0.33; 1.71 0.23;
        2.73 0.21; 5.51 0.20; 1.95 0.11; 4.48 0.20; 1.43 0.16; 2.91 0.26; 3.81 0.23;
        3.58 0.25; 2.62 0.25]

    # Assumed measurement standard deviations for the two eigenvalues.
    sigma = [1 0.1]

    # Closed-form eigenvalues of the 2x2 system matrix at parameters x.
    discriminant = sqrt(x[1]^2 + 4 * x[2]^2)
    predicted = [(x[1] + 2 * x[2] + discriminant) / 2 (x[1] + 2 * x[2] - discriminant) / 2]

    # Sum of squared standardized residuals over both eigenvalues.
    sumsq = 0
    for eig in 1:2, obs in 1:15
        sumsq += ((measurements[obs, eig] - predicted[eig]) / sigma[eig])^2
    end

    return exp(-1 / 2 * sumsq)
end


tuningsig = [0.04 0; 0 0.04]
propdist = MvNormal([0;0], tuningsig)
propdistsample() = rand(propdist)
propdistpdf(x) = pdf(propdist, x)

priorsample(n) = rand(Uniform(0.001,4),(n, 2))

# Independent uniform prior on (0.001, 4) for both parameters:
# 1 inside the box, 0 outside (unnormalized).
function uniformprior(x)
    inside = 0.001 < x[1] < 4 && 0.001 < x[2] < 4
    return inside ? 1 : 0
end

# Termination criterion for the SMC sampler: continue only while the stage
# counter j is below 1, i.e. stop after the first stage. The sample set
# `sset` is unused here; the coefficient of variation std(sset)/mean(sset)
# would be an alternative, data-driven criterion.
term(sset, j) = j < 1


# Run all three samplers on the same posterior for comparison.
tmcmcexsample = tmcmc(likelihood, uniformprior, priorsample, 1000, 0.001)

smcexsample = smc(likelihood, uniformprior, priorsample, propdistpdf, propdistsample, 1000, term)

# Metropolis-Hastings started at [2.84, 2.33] with no burn-in.
mhexsample = mh(likelihood, uniformprior, propdistpdf, propdistsample, [2.84, 2.33], 1000, 0)
47 changes: 47 additions & 0 deletions demo/bayesianinference/point_changeexample.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# Bayesian inference demo: Poisson changepoint model sampled with Gibbs.
using UncertaintyQuantification, Random

# Fixed seed for reproducible synthetic data.
Random.seed!(2547);
N = 100
# Gamma(a, 1/b) hyperprior parameters for the Poisson rates.
a = 2
b = 1

# True changepoint position, drawn uniformly over the observation window.
changepoint = floor(Int, N*rand(Uniform(0,1)))

lambda = Gamma(a, 1/b)

lambdas = zeros(N)

# Per-observation Poisson rates: Gamma draws scaled by the segment length.
# NOTE(review): scaling each rate by `changepoint` / `(N-changepoint)` is an
# unusual data-generating choice — confirm it matches the intended model.
for i in 1:N
if i<changepoint
lambdas[i] = rand(lambda) *changepoint
else
lambdas[i] = rand(lambda) *(N-changepoint)
end
end

# Observed Poisson counts, one per rate.
sampledata = [rand(Poisson(i)) for i in lambdas]
# Re-seed from system entropy for the sampler run.
Random.seed!()

x = sampledata


# Gibbs full conditionals for the two segment rates. The argument `in`
# (the current sampler state) is unused in the bodies.
# NOTE(review): these conditionals use the fixed, data-generating
# `changepoint` rather than the changepoint component of the current state
# (presumably in[3], given the initial state [3, 6, 50] below) — verify
# this is intended; also, `in` as a parameter name shadows Base.in.
lambda1(in) = rand(Gamma(a + sum(x[1:changepoint-1]), 1/(changepoint + b)))
lambda2(in) = rand(Gamma(a + sum(x[changepoint:N]), 1/(N - changepoint + b)))

# Gibbs full conditional for the changepoint position: draws an index in 1:N
# from p(n | lambda1, lambda2, data) via a single multinomial draw.
# `in[1]`, `in[2]` are the current rates of the first and second Poisson
# segments; the data `x`, length `N` are read from enclosing scope.
function multinomialN(in)
    logweights = zeros(N)
    for i in 1:N
        # Log-density (up to an additive constant) of a changepoint at i:
        # counts x[1:i-1] have rate in[1], counts x[i:N] have rate in[2].
        # BUG FIX: the last term previously read `-(N-changepoint)*in[2]`,
        # using the fixed true changepoint instead of the candidate index i.
        # That made the term constant in i, silently dropping the
        # i-dependence of the second segment's count contribution.
        logweights[i] = sum(x[1:i-1])*log(in[1]) - i*in[1] + sum(x[i:N])*log(in[2]) - (N-i)*in[2]
    end
    # Subtract the maximum before exponentiating for numerical stability,
    # then normalize into multinomial probabilities.
    weights = exp.(logweights .- maximum(logweights))
    draw = rand(Multinomial(1, weights ./ sum(weights)))
    # Exactly one entry of `draw` is 1; return its position.
    return findfirst(==(1), draw)
end

# Conditional samplers in state order: [rate 1, rate 2, changepoint index].
condist = [lambda1, lambda2, multinomialN]

# Gibbs sampling: 10000 iterations from initial state [3, 6, 50], burn-in 200.
rsample = gibbssample(condist, [3, 6, 50], 10000, 200)
38 changes: 38 additions & 0 deletions demo/bayesianinference/springconstantexample.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Bayesian inference demo: estimate a spring constant from noisy
# force/displacement measurements using sequential Monte Carlo (SMC).
using UncertaintyQuantification, Random

# Fixed seed for reproducible synthetic measurements.
Random.seed!(4183);

N = 10
# Measurement noise added to the displacements.
normal = Normal(0, 0.005)

# Applied forces (divided by 1.02 — presumably a calibration factor; confirm).
F = [5, 5, 10, 10, 15, 15, 20, 20, 25, 25]./1.02

# Noisy displacements generated with a true spring constant of 387.
noisesamples = [F[i]/387 + rand(normal) for i in 1:N]

# observations[:, 1] = force, observations[:, 2] = displacement.
observations = hcat(F,noisesamples)
# Re-seed from system entropy for the sampler run.
Random.seed!()

# Gaussian likelihood of the spring constant candidate x[1]: squared
# deviations of the observed force/displacement ratios from x[1], scaled
# by the empirical variance of those ratios. Reads the globals
# `observations` and `N` from the enclosing script.
function likelihood(x)
    ratios = observations[:,1]./observations[:,2]
    variation = var(ratios)
    sumsq = 0
    for k in 1:N
        sumsq += (ratios[k] - x[1])^2
    end
    return exp(-sumsq/(2*variation))
end

# Flat (unnormalized) prior over the spring constant.
prior(x) = 1
# Draw n prior samples uniformly from (1, 1000).
priorsample(n) = rand(Uniform(1,1000),(n))

# Standard-normal random-walk proposal.
propdist = Normal(0, 1)
propdistsample() = [rand(propdist)]
propdistpdf(x) = pdf(propdist, x[1])

# Termination criterion for the SMC sampler: continue only while the stage
# counter j is below 2 (the sample set `sset` is ignored).
term(sset, j) = j < 2


smcsample = smc(likelihood, prior, priorsample, propdistpdf, propdistsample, 5000, term)
9 changes: 9 additions & 0 deletions src/UncertaintyQuantification.jl
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ using Mustache
using QuasiMonteCarlo
using Random
using Reexport
using StatsBase

@reexport using Distributions

Expand Down Expand Up @@ -77,6 +78,13 @@ export to_copula_space
export to_physical_space!
export to_standard_normal_space
export to_standard_normal_space!
export mh
export gibbssample
export smc
export tmcmc
export grconvergence
export calcequaltails


include("inputs/inputs.jl")
include("inputs/parameter.jl")
Expand All @@ -94,6 +102,7 @@ include("models/polyharmonicspline.jl")

include("sensitivity/gradient.jl")

include("simulations/bayesianinference.jl")
include("simulations/linesampling.jl")
include("simulations/montecarlo.jl")
include("simulations/subset.jl")
Expand Down
Loading