Skip to content
Snippets Groups Projects
Commit 34e87fad authored by kohlhaasrebecca's avatar kohlhaasrebecca
Browse files

[Fix] Bug fixes in SeqDesign

All combinations of tradeoff-exploration-exploitation run through with SeqDesign. Reverted BayesInf to the current develop branch
parent e8a26017
No related branches found
No related tags found
2 merge requests!32Merge last changes for new release,!30Moved AL into SeqDesign and restructured score combination
Showing
with 135 additions and 44 deletions
File deleted
File deleted
// Class-interaction overview of the package.
// Box-shaped nodes are classes; ellipse-shaped nodes are module-level
// helper functions.
digraph classinteraction {
// Classes (rendered as boxes)
node [shape=box]
PyLinkForwardModel
BayesInference
BayesModelComparison
Discrepancy
MCMC
PostProcessing
BayesLinearRegression
EBLinearRegression
VBLinearRegression
Engine
ExpDesigns
Exploration
InputSpace
Input
Marginal
OrthogonalMatchingPursuit
RegressionFastARD
RegressionFastLaplace
MetaModel
// Free functions (rendered as ellipses)
node [shape=ellipse]
gelman_rubin
within_range
adaptPlot
apoly_construction
gamma_mean
hellinger_distance
logpdf
subdomain
eval_rec_rule
eval_rec_rule_arbitrary
eval_univ_basis
poly_rec_coeffs
check_ranges
cross_truncate
glexindex
corr
update_precisions
corr_loocv_error
create_psi
gaussian_process_emulator
// The only explicit relation in the graph: Input refers to Marginal
// (presumably composition — an Input holds Marginal objects; verify
// against the source classes).
Input -> Marginal [label=""]
}
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 20 14:48:43 2019
@author: farid
"""
import numpy as np
import scipy.stats as stats
import scipy.stats as st
import seaborn as sns
def analytical_function(xx, t=None):
    """
    Evaluate the analytical non-Gaussian test function.

    Authors: Farid Mohammadi, University of Stuttgart
             Sergey Oladyshkin, University of Stuttgart
    Questions/Comments: Please email Farid Mohammadi at:
        farid.mohammadi@iws.uni-stuttgart.de
    For function details and reference information, see:
        https://doi.org/10.3390/e21111081

    Parameters
    ----------
    xx : array of shape (n_samples, n_params)
        Parameter sets [x1, x2, ..., xn] where xn ~ Uniform(-5, 5).
    t : array, optional
        Vector of time points. The default is None, in which case
        t = (k - 1)/9 for k = 1, ..., 10 is used.

    Returns
    -------
    dict
        'x_values': the time vector t.
        'Z': the model output at the times t for the FIRST parameter
        set only (row 0 of the evaluated output matrix).
    """
    n_samples, n_params = xx.shape
    if t is None:
        t = np.arange(0, 10, 1.) / 9

    # Time-independent contribution of each parameter set.
    base = (xx[:, 0]**2 + xx[:, 1] - 1)**2 + xx[:, 0]**2 \
        + 0.1 * xx[:, 0] * np.exp(xx[:, 1]) + 1
    # Cubic contributions of any parameters beyond the first two.
    for k in range(2, n_params):
        base = base + xx[:, k]**3 / k

    # Time-dependent contribution: one row per parameter set.
    time_term = -2 * np.outer(xx[:, 0], np.sqrt(0.5 * t))

    # Broadcast the constant part across time and keep only row 0,
    # matching the original single-sample return convention.
    output = time_term + base[:, None]
    return {'x_values': t, 'Z': output[0]}
if __name__ == "__main__":
    MCSize = 10000
    ndim = 10
    sigma = 2

    # -------------------------------------------------------------------------
    # ----------------------- Synthetic data generation -----------------------
    # -------------------------------------------------------------------------
    t = np.arange(0, 10, 1.) / 9
    MAP = np.zeros((1, ndim))
    # analytical_function returns a dict: 'x_values' is t, 'Z' is the
    # model output (1D over time) of the first parameter set.
    synthethicData = analytical_function(MAP, t=t)

    # -------------------------------------------------------------------------
    # ---------------------- Generate Prior distribution ----------------------
    # -------------------------------------------------------------------------
    xx = np.zeros((MCSize, ndim))
    params = (-5, 5)
    for idxDim in range(ndim):
        lower, upper = params
        xx[:, idxDim] = stats.uniform(
            loc=lower, scale=upper-lower).rvs(size=MCSize)

    # -------------------------------------------------------------------------
    # ------------- BME and Kullback-Leibler Divergence -----------------------
    # -------------------------------------------------------------------------
    # Fix: analytical_function only returns the output of the FIRST
    # parameter set, so a single batched call cannot produce per-sample
    # likelihoods. Evaluate sample-by-sample to build the full
    # (MCSize, len(t)) output matrix.
    allOutputs = np.vstack(
        [analytical_function(xx[i:i+1], t=t)['Z'] for i in range(MCSize)])
    # Fix: the observed data lives under the 'Z' key (synthethicData is
    # a dict, not an array, so .shape[1] / [1] were invalid).
    cov_matrix = np.diag(np.repeat(sigma**2, len(t)))
    Likelihoods = st.multivariate_normal.pdf(
        allOutputs, mean=synthethicData['Z'], cov=cov_matrix)
    # Fix: `shade` was deprecated and removed in seaborn >= 0.14;
    # `fill` is the drop-in replacement.
    sns.kdeplot(np.log(Likelihoods[Likelihoods > 0]),
                fill=True, color="g", label='Ref. Likelihood')
    normLikelihood = Likelihoods / np.nanmax(Likelihoods)

    # Random numbers between 0 and 1 for rejection sampling
    unif = np.random.rand(1, MCSize)[0]

    # Reject the poorly performing prior samples
    accepted = normLikelihood >= unif

    # Prior-based estimation of BME
    logBME = np.log(np.nanmean(Likelihoods))
    print(f'\nThe Naive MC-Estimation of BME is {logBME:.5f}.')

    # Posterior-based expectation of likelihoods
    postExpLikelihoods = np.mean(np.log(Likelihoods[accepted]))

    # Calculate Kullback-Leibler Divergence
    KLD = postExpLikelihoods - logBME
    # Fix: the original string was missing the f-prefix, so the
    # placeholder was printed literally.
    print(f"The Kullback-Leibler divergence estimation is {KLD:.5f}.")

    # -------------------------------------------------------------------------
    # ----------------- Save the arrays as .npy files -------------------------
    # -------------------------------------------------------------------------
    if MCSize > 500000:
        np.save(f"data/refBME_KLD_{ndim}.npy", (logBME, KLD))
        np.save(f"data/mean_{ndim}.npy", np.mean(allOutputs, axis=0))
        np.save(f"data/std_{ndim}.npy", np.std(allOutputs, axis=0))
    else:
        np.save(f"data/Prior_{ndim}.npy", xx)
        # Fix: the original sliced a dict (`Outputs[1:]`); save the full
        # model-output matrix instead.
        np.save(f"data/origModelOutput_{ndim}.npy", allOutputs)
        np.save(f"data/validLikelihoods_{ndim}.npy", Likelihoods)
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
File added
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment