From f92f927d04c3f6e40188f403fbf1e59ab316f50d Mon Sep 17 00:00:00 2001
From: faridm69 <faridmohammadi69@gmail.com>
Date: Tue, 17 Nov 2020 17:54:38 +0100
Subject: [PATCH] [surrogate] speeded up the eval_metamodel function.

---
 .../surrogate_models/RegressionFastARD.py |  2 ++
 .../surrogate_models/surrogate_models.py  | 26 ++++++++++++-------
 .../Test_AnalyticalFunction.py            | 14 +++++-----
 3 files changed, 26 insertions(+), 16 deletions(-)

diff --git a/BayesValidRox/surrogate_models/RegressionFastARD.py b/BayesValidRox/surrogate_models/RegressionFastARD.py
index 08c2a6de1..2f40199e1 100755
--- a/BayesValidRox/surrogate_models/RegressionFastARD.py
+++ b/BayesValidRox/surrogate_models/RegressionFastARD.py
@@ -245,6 +245,8 @@ class RegressionFastARD(LinearModel,RegressorMixin):
             start         = np.argmax(proj)
             active[start] = True
             A[start]      = XXd[start]/( proj[start] - var_y)
+
+        warning_flag = 0
 
         scores_ = []
 
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index 9b819535f..a8bc70145 100755
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -410,7 +410,7 @@ class Metamodel:
         return univ_vals
 
     #--------------------------------------------------------------------------------------------------------
-    def PCE_create_Psi(self,BasisIndices,univ_p_val):
+    def PCE_create_Psi(self,BasisIndices,univ_p_val, active=None):
         """
         This function assemble the design matrix Psi from the given basis index
         set INDICES and the univariate polynomial evaluations univ_p_val.
@@ -429,7 +429,7 @@ class Metamodel:
 
         # number of basis elements
         P = BasisIndices.shape[0]
-        
+
         # check that the variables have consistent sizes
         if NofPa != BasisIndices.shape[1]:
             raise ValueError("The shapes of BasisIndices and univ_p_val don't match!!")
@@ -438,7 +438,8 @@ class Metamodel:
         Psi = np.ones((NofSamples, P),dtype=np.float32)
         # Assemble the Psi matrix
         for m in range(BasisIndices.shape[1]):
-            aa = np.where(BasisIndices[:,m]>0)[0] 
+            aa = np.where(BasisIndices[:,m]>0)[0]
+            if active is not None: aa = list(set(aa).intersection(active)) # Take active terms
             try:
                 basisIdx = BasisIndices[aa,m]
                 Psi[:,aa] = np.multiply(Psi[:,aa] , np.reshape(univ_p_val[:, m, basisIdx], Psi[:,aa].shape))
@@ -525,7 +526,7 @@ class Metamodel:
             clf_poly = VBLinearRegression(fit_intercept=False)
 
         elif RegMethod.lower() == 'ebl':
-            clf_poly = EBLinearRegression(fit_intercept=False)
+            clf_poly = EBLinearRegression(fit_intercept=False,optimizer = 'em')
 
         # Fit
         clf_poly.fit(PSI, ModelOutput)
@@ -551,7 +552,6 @@ class Metamodel:
             PolynomialDegrees_Sparse = BasisIndices.toarray() if sparsity else BasisIndices
             PSI_Sparse = PSI
             coeffs = clf_poly.coef_
-
         # Raise an error, if all coeffs are zero
         #raise ValueError('All the coefficients of the regression model are zero!')
 
@@ -633,8 +633,8 @@ class Metamodel:
            score, LCerror = self.CorrectedLeaveoutError(sparsePSI, Coeffs, ModelOutput, clf_poly)
 
            # Check the convergence of noise for FastARD
-           # if self.RegMethod == 'FastARD' and clf_poly.alpha_ < np.finfo(np.float32).eps:
-           #     score = -np.inf
+           if self.RegMethod == 'FastARD' and clf_poly.alpha_ < np.finfo(np.float32).eps:
+               score = -np.inf
 
            qNormScores[qidx] = score
            qAllCoeffs[str(qidx+1)] = Coeffs
@@ -2178,7 +2178,15 @@ class Metamodel:
                     y_mean, y_std = gp.predict(self.Samples, return_std=True)
                 else:
                     # Perdiction with PCE or pcekriging
-                    PSI_Val = self.PCE_create_Psi(self.BasisDict[Outkey][Inkey], univ_p_val)
+                    # Find active terms
+                    try:
+                        active_terms = np.nonzero(self.clf_poly[Outkey][Inkey].coef_)[0]
+                    except:
+                        active_terms = np.nonzero(self.CoeffsDict[Outkey][Inkey].coef_)[0]
+
+                    # Assemble Psi matrix
+                    PSI_Val = self.PCE_create_Psi(self.BasisDict[Outkey][Inkey], univ_p_val,
+                                                  active=active_terms)
 
                     # Perdiction
                     try: # with error bar
@@ -2194,7 +2202,7 @@ class Metamodel:
                         gp = self.gp_poly[Outkey][Inkey]
                         y_gp_mean, y_gp_std = gp.predict(self.Samples, return_std=True)
                         y_mean += y_gp_mean
-                        y_std = y_gp_std
+                        y_std += y_gp_std
 
                     PCEOutputs_mean[:, idx] = y_mean
                     PCEOutputs_std[:, idx] = y_std
diff --git a/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py b/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
index 776394a0d..d00381c51 100755
--- a/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
+++ b/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
@@ -112,13 +112,13 @@ if __name__ == "__main__":
     # The degree with the lowest Leave-One-Out cross-validation (LOO)
     # error (or the highest score=1-LOO)estimator is chosen as the final
     # metamodel.
-    MetaModelOpts.MinPceDegree = 1#1
-    MetaModelOpts.MaxPceDegree = 12#12
+    MetaModelOpts.MinPceDegree = 1
+    MetaModelOpts.MaxPceDegree = 8 #8
     # q-quasi-norm 0<q<1 (default=1)
-    MetaModelOpts.q = 1.0 if ndim<5 else 0.75 #np.linspace(0.3,0.8, 3)
-    
+    MetaModelOpts.q = np.linspace(0.3,0.8,3) if ndim<5 else 0.75
+
     # Print summary of the regression results
-    # MetaModelOpts.DisplayFlag = True
+    MetaModelOpts.DisplayFlag = True
     # ------------------------------------------------
     # ------ Experimental Design Configuration -------
     # ------------------------------------------------
@@ -129,7 +129,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.Method = 'normal'
-    NrofInitSamples = 400 #5*ndim #50
+    NrofInitSamples = 350 #5*ndim #50
     MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples
 
     # Sampling methods
@@ -245,7 +245,7 @@ if __name__ == "__main__":
 
     # Select the inference method
    BayesOpts.SamplingMethod = 'MCMC'
-    BayesOpts.MCMCnwalkers = 200
+    BayesOpts.MCMCnwalkers = 300
     # BayesOpts.MCMCinitSamples = np.zeros((1,10)) + 1e-3 * np.random.randn(200, 10)
     # BayesOpts.MCMCnSteps = 1000
     import emcee
-- 
GitLab
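The core of the speed-up in this patch: eval_metamodel collects the indices of basis terms whose regression coefficients are nonzero (np.nonzero(...coef_)[0]) and passes them to PCE_create_Psi through the new `active` argument, so the design matrix Psi is multiplied out only for the terms the sparse solver actually kept. Below is a minimal, self-contained sketch of that idea. It is not the BayesValidRox API: the helper name create_psi, the Legendre stand-in basis, and the toy data are illustrative assumptions.

    import numpy as np

    def create_psi(basis_indices, univ_p_val, active=None):
        """Assemble a PCE design matrix Psi (hypothetical helper, not the library API).

        basis_indices : (P, n_params) integer multi-indices of the polynomial basis.
        univ_p_val    : (n_samples, n_params, max_degree+1) univariate polynomial values.
        active        : optional indices of terms with nonzero coefficients; when given,
                        only those columns of Psi are assembled, which is the speed-up.
        """
        n_samples = univ_p_val.shape[0]
        n_terms = basis_indices.shape[0]
        psi = np.ones((n_samples, n_terms), dtype=np.float32)
        for m in range(basis_indices.shape[1]):
            cols = np.where(basis_indices[:, m] > 0)[0]
            if active is not None:
                cols = np.intersect1d(cols, active)  # skip terms pruned by the sparse solver
            psi[:, cols] *= univ_p_val[:, m, basis_indices[cols, m]]
        return psi

    # Toy usage: 2 parameters, total degree 2, sparse coefficient vector.
    rng = np.random.default_rng(0)
    basis = np.array([[0, 0], [1, 0], [0, 1], [2, 0], [1, 1], [0, 2]])
    x = rng.uniform(-1.0, 1.0, size=(5, 2))
    # Legendre evaluations P_0..P_2 per sample and parameter (a stand-in for the
    # orthonormal univariate polynomials the surrogate uses).
    univ = np.stack([np.polynomial.legendre.legvander(x[:, m], 2) for m in range(2)], axis=1)
    coeffs = np.array([0.8, 0.0, 1.5, 0.0, 0.0, -0.3])  # hypothetical sparse PCE coefficients
    active_terms = np.nonzero(coeffs)[0]                # same trick as in eval_metamodel
    psi = create_psi(basis, univ, active=active_terms)  # inactive columns are never evaluated
    y_pred = psi @ coeffs

Columns whose coefficients are zero keep their initial value and contribute nothing to psi @ coeffs, so restricting the assembly to the active set leaves the prediction unchanged while avoiding most per-term multiplications for strongly sparse expansions.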