diff --git a/BayesValidRox/BayesInference/BayesInference.py b/BayesValidRox/BayesInference/BayesInference.py
index a1c2142268628dda713308d32697e4a4733da3ef..950e16ce9aa5014f0ed9a28ad2328993ca51492c 100644
--- a/BayesValidRox/BayesInference/BayesInference.py
+++ b/BayesValidRox/BayesInference/BayesInference.py
@@ -178,7 +178,7 @@ class BayesInference:
                 logL = multivariate_normal.logpdf(TotalOutputs, mean=Data, cov=covMatrix)
             except:
                 logL = -np.inf
-
+
         return logL
 
     #--------------------------------------------------------------------------------------------------------
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index b1508bb70aa61ac0256c565abfa45ccc461f717d..10188743594894ebbf63190d3bf589ed2863e6b9 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -885,7 +885,7 @@ class Metamodel:
         return pca, OutputMatrix
 
     #--------------------------------------------------------------------------------------------------------
-    def GaussianProcessEmulator(self, X, y, autoSelect=False, varIdx=None):
+    def GaussianProcessEmulator(self, X, y, autoSelect=True, varIdx=None):
 
         from sklearn.gaussian_process import GaussianProcessRegressor
         from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,
                                                       ConstantKernel)
 
-        kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
-                   1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),
-                   # ConstantKernel(0.1, (0.01, 10.0))
-                   # * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),
-                   1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
+        kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-5, 1e5)),
+                   1.0 * RationalQuadratic(length_scale=0.2, alpha=0.1),
+                   # 1.0 * ExpSineSquared(length_scale=1.0, length_scale_bounds=(1e-05, 1e05)),
+                   1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-5, 1e5),
                                 nu=1.5)]
 
         if autoSelect:# Automatic selection of the kernel
             BME = []
             for i, kernel in enumerate(kernels):
                 gp[i] = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=2,
-                                                 normalize_y=False)
+                                                 normalize_y=True)
 
                 # Fit to data using Maximum Likelihood Estimation of the parameters
                 gp[i].fit(X, y)
 
             gp = gp[np.argmax(BME)]
 
         else:
-            gp = GaussianProcessRegressor(kernel=kernels[-1], n_restarts_optimizer=2)
+            gp = GaussianProcessRegressor(kernel=kernels[-1], n_restarts_optimizer=2,
+                                          normalize_y=True)
             gp.fit(X, y)
 
         # Compute score
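
The change above switches GaussianProcessEmulator to automatic kernel selection by default, widens the RBF/Matern length-scale bounds, and enables normalize_y in GaussianProcessRegressor. A minimal standalone sketch of that selection loop follows; it is not the package's own code, and it assumes the fitted model's log marginal likelihood is the score used to rank kernels (the diff does not show how BME is filled). The helper name fit_gp_auto and the toy data are illustrative only.

# Sketch: pick the candidate kernel whose fitted GP has the highest
# log marginal likelihood, using the defaults this diff turns on
# (normalize_y=True, wide length-scale bounds).
# Assumption: selection score = log marginal likelihood; BME computation
# is not visible in the diff.
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF, Matern, RationalQuadratic

def fit_gp_auto(X, y):
    kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-5, 1e5)),
               1.0 * RationalQuadratic(length_scale=0.2, alpha=0.1),
               1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-5, 1e5),
                            nu=1.5)]
    fitted, scores = [], []
    for kernel in kernels:
        gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=2,
                                      normalize_y=True)
        gp.fit(X, y)  # hyperparameters set by maximizing the marginal likelihood
        fitted.append(gp)
        scores.append(gp.log_marginal_likelihood_value_)
    return fitted[int(np.argmax(scores))]  # best-scoring emulator

# Usage on toy data (illustrative only)
rng = np.random.default_rng(0)
X = rng.uniform(-3, 3, size=(40, 1))
y = np.sin(X).ravel() + 0.1 * rng.standard_normal(40)
gp = fit_gp_auto(X, y)
print(gp.kernel_, gp.score(X, y))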