From 8c57cfcd4d4cb9186a48ac5072e87445c8fa7af1 Mon Sep 17 00:00:00 2001
From: Farid Mohammadi <farid.mohammadi@iws.uni-stuttgart.de>
Date: Sun, 29 Mar 2020 17:20:52 +0200
Subject: [PATCH] [surrogate] minor changes.

Rename the Gaussian process path from 'GPR' to 'GPE'
(GaussianProcessEmulator), fit a single kernel instead of looping over
candidates and report 1 - R^2 as the error estimate, re-initialize the
input distributions of user-given designs only when they are empty,
raise the Monte Carlo size for the integral estimation, and adjust the
analytical-function and CO2 benchmark test settings.
---
 .../PostProcessing/PostProcessing.py          |  2 +-
 BayesValidRox/surrogate_models/ExpDesigns.py  |  5 +-
 .../surrogate_models/surrogate_models.py      | 55 ++++++++++---------
 .../AnalyticalFunctionComplex_Test.py         |  4 +-
 .../AnalyticalFunction_Test.py                | 14 ++---
 .../Co2BenchmarkTest/Test_CO2Benchmark.py     |  2 +-
 6 files changed, 44 insertions(+), 38 deletions(-)

diff --git a/BayesValidRox/PostProcessing/PostProcessing.py b/BayesValidRox/PostProcessing/PostProcessing.py
index 5e597f6c6..a0a0ca30a 100644
--- a/BayesValidRox/PostProcessing/PostProcessing.py
+++ b/BayesValidRox/PostProcessing/PostProcessing.py
@@ -144,7 +144,7 @@ class PostProcessing:
         idx = 0
         for Inkey, InIdxValues in ValuesDict.items():
 
-            if PCEModel.RegMethod == 'GPR':
+            if PCEModel.RegMethod == 'GPE':
                 gp = PCEModel.clf_poly[Outkey][Inkey]
                 y_mean, y_std = gp.predict(Samples, return_std=True)
 
diff --git a/BayesValidRox/surrogate_models/ExpDesigns.py b/BayesValidRox/surrogate_models/ExpDesigns.py
index 247ce83ce..8f9f0df18 100644
--- a/BayesValidRox/surrogate_models/ExpDesigns.py
+++ b/BayesValidRox/surrogate_models/ExpDesigns.py
@@ -96,9 +96,10 @@ class ExpDesigns:
         if self.SamplingMethod == 'user':
             X = self.X
             self.NrSamples = len(X)
-            self.Input_distributions, self.BoundTuples = self.Parameter_Initialization()
+            if len(self.Input_distributions)==0:
+                self.Input_distributions, self.BoundTuples = self.Parameter_Initialization()
 
-        elif self.SamplingMethod == 'random': #
+        elif self.SamplingMethod == 'random':
             X = self.MCSampling(NrSamples)
 
         elif self.SamplingMethod == 'PCM' or self.SamplingMethod == 'LSCM':
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index 313f82f27..169915f47 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -607,9 +607,11 @@ class aPCE:
 
         P = AllnTerms[str(bestIdx)]
 
+        print('Output variable %s:'%(varIdx+1))
         print('The estimation of PCE coefficients converged at polynomial degree '
-              '%s and qNorm %s for output variable %s.'%(self.DegreeArray[bestIdx-1],
-              q, varIdx+1))
+              '%s with %s terms (Sparsity index = %s).'%(self.DegreeArray[bestIdx-1],
+              len(PolynomialDegrees),
+              round(len(PolynomialDegrees)/P, 3)))
 
         print('Final ModLOO error estimate: %s'%(1-max(scores)))
         print('\n'+'-'*50)
@@ -767,7 +769,7 @@ class aPCE:
             value = self[item] = type(self)()
             return value
     #--------------------------------------------------------------------------------------------------------
-    def GPR(self, X, y, varIdx):
+    def GaussianProcessEmulator(self, X, y, varIdx):
 
         from sklearn.gaussian_process import GaussianProcessRegressor
         from sklearn.gaussian_process.kernels import (RBF, Matern)
 
         kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
                    1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
                                 nu=1.5)]
 
-        for kernel in kernels:
-            # Instantiate a Gaussian Process model
-            #kernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))
-            #kernel = RBF(20, (1e-5, 1e5))
-            gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)
-
-            # Fit to data using Maximum Likelihood Estimation of the parameters
-            gp.fit(X, y)
-
-            # Compute score
-            residual = gp.predict(X) - y
-            normEmpErr = np.mean(residual**2)/np.var(y)
+        #for kernel in kernels:
+        # Instantiate a Gaussian Process model
+        #kernel = C(1.0, (1e-3, 1e3)) * RBF(10, (1e-2, 1e2))
+        #kernel = RBF(20, (1e-5, 1e5))
+        gp = GaussianProcessRegressor(kernel=kernels[0], n_restarts_optimizer=9)
 
-            print('\n'+'-'*50)
-            print('The estimation of GPR coefficients converged at for output variable %s.'%(varIdx+1))
-            print('Final normalized emperical error estimate: %s'%(normEmpErr))
-            print('\n'+'-'*50)
+        # Fit to data using Maximum Likelihood Estimation of the parameters
+        gp.fit(X, y)
+
+        # Compute score
+        #residual = gp.predict(X) - y
+        #normEmpErr = np.mean(residual**2)/np.var(y)
+        Score = gp.score(X, y)
+
+        print('\n'+'-'*50)
+        print('The estimation of GPE coefficients converged for output variable %s.'%(varIdx+1))
+        print('Final normalized empirical error estimate: %s'%(1-Score))
+        print('\n'+'-'*50)
 
-        return gp, gp.alpha_, 1-normEmpErr
+        return gp, gp.alpha_, Score
 
     #--------------------------------------------------------------------------------------------------------
     def create_PCE_normDesign(self, Model, OptDesignFlag=False):
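For reference, the snippet below is a self-contained sketch of the fitting pattern the rewritten GaussianProcessEmulator follows (a single scaled kernel, maximum-likelihood hyperparameter fit, and 1 - R^2 reported as the error); the toy design X and output y are invented for illustration and are not BayesValidRox data:

```python
# Minimal sketch of the GPE fit above; X, y and their shapes are assumptions.
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.RandomState(0)
X = rng.uniform(-5, 5, size=(30, 2))   # toy collocation points: 30 samples, 2 inputs
y = np.sin(X[:, 0]) + 0.1 * X[:, 1]    # toy scalar output per sample

# kernels[0] from the patch: a scaled RBF kernel with bounded length scale
kernel = 1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0))
gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=9)
gp.fit(X, y)                           # ML estimation of the hyperparameters

Score = gp.score(X, y)                 # R^2 on the training design
print('Final normalized empirical error estimate: %s' % (1 - Score))
y_mean, y_std = gp.predict(X, return_std=True)   # mean and std, as used downstream
```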
@@ -822,7 +825,8 @@ class aPCE:
         OutputDict = self.ModelOutputDict.copy()
 
         # Evaluate the univariate polynomials on ExpDesign
-        self.univ_p_val = self.univ_basis_vals(CollocationPoints)
+        if self.RegMethod != 'GPE':
+            self.univ_p_val = self.univ_basis_vals(CollocationPoints)
 
         if 'x_values' in OutputDict: del OutputDict['x_values']
 
@@ -832,8 +836,9 @@ class aPCE:
         if not OptDesignFlag:
             print(">>>> Calculating the PCE coefficients of %s with %s<<<<<<"%(key, self.RegMethod))
 
         for idx in range(OutputMatrix.shape[1]):
-            if self.RegMethod== 'GPR': # TODO: Change RegMethod to metamodel type
-                self.clf_poly[key]["y_"+str(idx+1)], self.CoeffsDict[key]["y_"+str(idx+1)], self.ScoresDict[key]["y_"+str(idx+1)] = self.GPR(CollocationPoints, OutputMatrix[:,idx], idx)
+            if self.RegMethod == 'GPE': # TODO: Change RegMethod to metamodel type
+                self.clf_poly[key]["y_"+str(idx+1)], self.CoeffsDict[key]["y_"+str(idx+1)], self.ScoresDict[key]["y_"+str(idx+1)] = self.GaussianProcessEmulator(CollocationPoints, OutputMatrix[:,idx], idx)
+
             else: # Only update the coeffs in the searching alg. of SeqExpDesign
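The loop above fits one emulator per output component and files it under keys "y_1", "y_2", and so on. A standalone sketch of that bookkeeping, with the per-output-variable nesting (the [key] level) dropped and toy data substituted:

```python
# Hypothetical standalone version of the per-output dispatch; toy data only.
import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

rng = np.random.RandomState(1)
CollocationPoints = rng.uniform(0, 1, size=(20, 3))   # toy design: 20 points, 3 inputs
OutputMatrix = rng.normal(size=(20, 4))               # toy output: 4 components

clf_poly, CoeffsDict, ScoresDict = {}, {}, {}
for idx in range(OutputMatrix.shape[1]):
    gp = GaussianProcessRegressor(kernel=1.0 * RBF(length_scale=1.0),
                                  n_restarts_optimizer=9)
    gp.fit(CollocationPoints, OutputMatrix[:, idx])
    clf_poly["y_"+str(idx+1)] = gp                    # the fitted emulator itself
    CoeffsDict["y_"+str(idx+1)] = gp.alpha_           # dual coefficients, as returned above
    ScoresDict["y_"+str(idx+1)] = gp.score(CollocationPoints, OutputMatrix[:, idx])
```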
@@ -1565,7 +1570,7 @@ class aPCE:
         for Inkey, InIdxValues in ValuesDict.items():
             idx = int(Inkey.split('_')[1]) - 1
 
-            if self.RegMethod == 'GPR':
+            if self.RegMethod == 'GPE':
                 gp = self.clf_poly[Outkey][Inkey]
                 PCEOutputs_mean[:, idx], PCEOutputs_std[:, idx] = gp.predict(Samples, return_std=True)
 
@@ -1817,7 +1822,7 @@ class aPCE:
 
         # Estimation of the integral via Monte Carlo integration
         SamplingMethod = 'random'
-        MCsize = 10000
+        MCsize = 100000
 
         ESS =0
 
         while ((ESS > MCsize) or (ESS < 1)):
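The hunk above only raises the Monte Carlo sample size; how ESS is computed is not part of this patch. Assuming the standard importance-sampling effective sample size (a Kish-type estimate, so both the function and the placeholder log-likelihoods below are assumptions, not BayesValidRox code), the loop's exit condition 1 <= ESS <= MCsize can be sketched as:

```python
# Assumed ESS computation behind the while loop above; the log-likelihoods
# are placeholders, not BayesValidRox internals.
import numpy as np

def effective_sample_size(logL):
    """Kish effective sample size, 1/sum(w_i^2), from normalized weights."""
    w = np.exp(logL - logL.max())   # shift for numerical stability
    w /= w.sum()
    return 1.0 / np.sum(w**2)

rng = np.random.RandomState(2)
MCsize = 100000
ESS = 0
while ((ESS > MCsize) or (ESS < 1)):
    logL = -0.5 * rng.normal(size=MCsize)**2   # placeholder log-likelihoods
    ESS = effective_sample_size(logL)
    # the real code would redraw or enlarge the sample while ESS is out of range
print('ESS = %.0f of %s samples' % (ESS, MCsize))
```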
diff --git a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py
index 2c92917ea..fc4aca052 100644
--- a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py
+++ b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py
@@ -121,7 +121,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.Method = 'sequential'
-    NrofInitSamples = 25
+    NrofInitSamples = 5
     MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples
 
     # Sampling methods
@@ -131,7 +131,7 @@ if __name__ == "__main__":
 
     # Sequential experimental design (needed only for sequential ExpDesign)
     MetaModelOpts.ExpDesign.NrofNewSample = 1
-    MetaModelOpts.ExpDesign.MaxNSamples = 30
+    MetaModelOpts.ExpDesign.MaxNSamples = 50
     MetaModelOpts.ExpDesign.ModifiedLOOThreshold = 1e-10
 
     # Defining the measurement error, if it's known a priori
diff --git a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
index 4441dc6fe..3a5966722 100644
--- a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
+++ b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
@@ -98,7 +98,7 @@ if __name__ == "__main__":
     # 3)LARS: Least angle regression 4)ARD: Bayesian ARD Regression
     # 5)SGDRegressor: Stochastic gradient descent learning
 
-    MetaModelOpts.RegMethod = 'BRR'
+    MetaModelOpts.RegMethod = 'ARD'
 
     # Print summary of the regression results
     # MetaModelOpts.DisplayFlag = True
@@ -110,7 +110,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.Method = 'sequential'
-    NrofInitSamples = 10
+    NrofInitSamples = 5
     MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples
 
     # Sampling methods
@@ -120,7 +120,7 @@ if __name__ == "__main__":
 
     # Sequential experimental design (needed only for sequential ExpDesign)
     MetaModelOpts.ExpDesign.NrofNewSample = 1
-    MetaModelOpts.ExpDesign.MaxNSamples = 100
+    MetaModelOpts.ExpDesign.MaxNSamples = 50
     MetaModelOpts.ExpDesign.ModifiedLOOThreshold = 1e-16
 
     # Defining the measurement error, if it's known a priori
@@ -144,7 +144,7 @@ if __name__ == "__main__":
     #MetaModelOpts.ExpDesign.nReprications = 5
     # -------- Exploration ------
     #1)'Voronoi' 2)'random' 3)'latin_hypercube' 4)'dual annealing'
-    MetaModelOpts.ExpDesign.ExploreMethod = 'random'
+    MetaModelOpts.ExpDesign.ExploreMethod = 'latin_hypercube'
 
     # Use when 'dual annealing' chosen
     MetaModelOpts.ExpDesign.MaxFunItr = 200
@@ -155,18 +155,18 @@ if __name__ == "__main__":
 
     # -------- Exploitation ------
     # 1)'BayesOptDesign' 2)'VarOptDesign' 3)'alphabetic' 4)'Space-filling'
-    MetaModelOpts.ExpDesign.ExploitMethod = 'VarOptDesign'
+    MetaModelOpts.ExpDesign.ExploitMethod = 'BayesOptDesign'
 
     # BayesOptDesign -> when data is available
     # 1)DKL (Kullback-Leibler Divergence) 2)DPP (D-Posterior-precision)
     # 3)APP (A-Posterior-precision)
     # 1)DKL (Kullback-Leibler Divergence) 2)BME (Bayesian model evidence)
     # 3)infEntropy (Entropy-based information gain)
-    # MetaModelOpts.ExpDesign.UtilityFunction = 'BME' #['DKL', 'DPP']
+    MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP']
 
     # VarBasedOptDesign -> when data is not available
     # Only with Voronoi >>> 1)Entropy 2)EIGF, 3)ALM, 4)LOOCV
-    MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy' #['EIGF', 'Entropy', 'LOOCV']
+    # MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy' #['EIGF', 'Entropy', 'LOOCV']
 
     # alphabetic
     # 1)D-Opt (D-Optimality) 2)A-Opt (A-Optimality)
diff --git a/BayesValidRox/tests/Co2BenchmarkTest/Test_CO2Benchmark.py b/BayesValidRox/tests/Co2BenchmarkTest/Test_CO2Benchmark.py
index 1b303b168..dffc36cd0 100644
--- a/BayesValidRox/tests/Co2BenchmarkTest/Test_CO2Benchmark.py
+++ b/BayesValidRox/tests/Co2BenchmarkTest/Test_CO2Benchmark.py
@@ -318,7 +318,7 @@ if __name__ == "__main__":
         figPosterior.set_size_inches((10,10))
         figPosterior.savefig('./'+newpath+'/'+key+'.svg',bbox_inches='tight')
 
-    theta_true = (4.98777e-01 , 4.516252e-01, 1.461320e+00)
+    theta_true = MetaModelOpts.ExpDesign.MAP
     parNames = ['Inj. rate', 'Rel. permeability degree', 'Res. porosity']
     # Extract the posterior with emulator
     surrogatePosterior = Bayes.Posterior_df
--
GitLab