From 6e56aefbb84d4fe9416e073e54a510031d122d45 Mon Sep 17 00:00:00 2001
From: faridm69 <faridmohammadi69@gmail.com>
Date: Fri, 18 Sep 2020 17:36:24 +0200
Subject: [PATCH] [surrogate][GPE] enable automatic kernel selection and set
 normalize_y to True
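
GaussianProcessEmulator now defaults to autoSelect=True, so the candidate
RBF, RationalQuadratic and Matern kernels are each fitted and the regressor
with the largest BME is kept. The RBF and Matern length-scale bounds are
widened from (1e-1, 10.0) to (1e-5, 1e5), and normalize_y=True is passed to
GaussianProcessRegressor in both the automatic-selection and the fixed
Matern branches, so the target values are normalized before fitting.

Rough sketch of the resulting default behaviour (illustrative only; X and y
stand for the training inputs and outputs of a single output variable):

    # autoSelect now defaults to True: each candidate kernel is fitted with
    # GaussianProcessRegressor(kernel, n_restarts_optimizer=2,
    #                          normalize_y=True)
    # and the regressor with the largest BME is kept via np.argmax(BME).
    gp = self.GaussianProcessEmulator(X, y)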

---
 BayesValidRox/BayesInference/BayesInference.py   |  2 +-
 .../surrogate_models/surrogate_models.py         | 16 ++++++++--------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/BayesValidRox/BayesInference/BayesInference.py b/BayesValidRox/BayesInference/BayesInference.py
index a1c214226..950e16ce9 100644
--- a/BayesValidRox/BayesInference/BayesInference.py
+++ b/BayesValidRox/BayesInference/BayesInference.py
@@ -178,7 +178,7 @@ class BayesInference:
             logL = multivariate_normal.logpdf(TotalOutputs, mean=Data, cov=covMatrix)
         except:
             logL = -np.inf
-        
+
         return logL
     
     #--------------------------------------------------------------------------------------------------------
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index b1508bb70..101887435 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -885,7 +885,7 @@ class Metamodel:
         return pca, OutputMatrix
 
     #--------------------------------------------------------------------------------------------------------
-    def GaussianProcessEmulator(self, X, y, autoSelect=False, varIdx=None):
+    def GaussianProcessEmulator(self, X, y, autoSelect=True, varIdx=None):
         
         from sklearn.gaussian_process import GaussianProcessRegressor
         from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,
@@ -893,11 +893,10 @@ class Metamodel:
                                               ConstantKernel)
 
 
-        kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
-                   1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),
-                   # ConstantKernel(0.1, (0.01, 10.0))
-                   #     * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),
-                   1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
+        kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-5, 1e5)),
+                   1.0 * RationalQuadratic(length_scale=0.2, alpha=0.1),
+                   # 1.0 * ExpSineSquared(length_scale=1.0, length_scale_bounds=(1e-05, 1e05)),
+                   1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-5, 1e5),
                                 nu=1.5)]
         
         if autoSelect:# Automatic selection of the kernel
@@ -905,7 +904,7 @@ class Metamodel:
             BME = []
             for i, kernel in enumerate(kernels):
                 gp[i] = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=2,
-                                                  normalize_y=False)
+                                                  normalize_y=True)
             
                 # Fit to data using Maximum Likelihood Estimation of the parameters
                 gp[i].fit(X, y)
@@ -915,7 +914,8 @@ class Metamodel:
             gp = gp[np.argmax(BME)]    
         
         else:
-            gp = GaussianProcessRegressor(kernel=kernels[-1], n_restarts_optimizer=2)
+            gp = GaussianProcessRegressor(kernel=kernels[-1], n_restarts_optimizer=2,
+                                          normalize_y=True)
             gp.fit(X, y)
             
         # Compute score
-- 
GitLab