From 9311723adad546943b3f657a8c26c6e4d5b0eef7 Mon Sep 17 00:00:00 2001
From: faridm69 <faridmohammadi69@gmail.com>
Date: Wed, 15 Jul 2020 13:38:43 +0200
Subject: [PATCH] [surrogate][FastARD] report non-converged degrees and reuse basis indices

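surrogate_models.py:
- Print the degree at which FastARD fails to converge before leaving the degree loop.
- Tighten the PCA explained-variance cutoff from 99.99% to 99.999%.
- Reuse polynomial basis indices already stored in allBasisIndices instead of regenerating them for every degree.

Test_AnalyticalFunction.py:
- Switch the analytical test to FastARD with a sequential experimental design (MaxPceDegree 10, 10 initial samples, up to 50 samples).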
---
 .../surrogate_models/surrogate_models.py      | 22 +++++++++++--------
 .../Test_AnalyticalFunction.py                | 10 ++++-----
 2 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index 3518726ce..12e86cb53 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -651,18 +651,21 @@ class aPCE:
             
             # Leave the loop, if FastARD did not converge.
             if self.RegMethod == 'FastARD' and not clf_poly.converged:
+                print("Degree {0} did not converge!".format(deg))
                 break
-            
+                
             # Store the score in the scores list
             bestqIdx = np.nanargmax(qNormScores)
             scores[degIdx] = qNormScores[bestqIdx] #np.max(qNormScores)
             
+            
             AllCoeffs[str(degIdx+1)] = qAllCoeffs[str(bestqIdx+1)]
             AllIndices_Sparse[str(degIdx+1)] = qAllIndices_Sparse[str(bestqIdx+1)]
             Allclf_poly[str(degIdx+1)] = qAllclf_poly[str(bestqIdx+1)]
             AllnTerms[str(degIdx+1)] = qAllnTerms[str(bestqIdx+1)]
             AllLCerror[str(degIdx+1)] = qAllLCerror[str(bestqIdx+1)]
             
+            
             # check the direction of the error (on average):
             # if it increases consistently stop the iterations
             if len(scores[scores!=-np.inf]) > n_checks_degree:
@@ -860,7 +863,7 @@ class aPCE:
         var = np.cumsum(np.round(covar_matrix.explained_variance_ratio_, decimals=5)*100)
         
         try:
-            selected_n_components = np.where(var>=99.99)[0][0] + 1
+            selected_n_components = np.where(var>=99.999)[0][0] + 1
         except:
             selected_n_components = min(n_samples, n_features)
 
@@ -946,13 +949,14 @@ class aPCE:
             M_uptoMax = lambda maxDeg: np.array([math.factorial(ndim+d)//(math.factorial(ndim)*math.factorial(d))  for d in range(1,maxDeg+1)])
             deg = range(1,maxDeg+1)[np.argmin(abs(M_uptoMax(maxDeg)-nSamples*ndim*d))]
             self.q = np.array(self.q) if not np.isscalar(self.q) else np.array([self.q])
-            
-            if deg not in self.DegreeArray:
-                self.allBasisIndices = self.AutoVivification()
-                self.DegreeArray = np.array([deg])#np.arange(self.MinPceDegree,deg+1)
-                # Generate the polynomial basis indices
-                for qidx, q in enumerate(self.q):
-                    self.allBasisIndices[str(deg)][str(q)] = self.PolyBasisIndices(degree=deg, q=q)
+            self.DegreeArray = np.array([deg])  # or np.arange(self.MinPceDegree, deg+1)
+            
+            for deg in self.DegreeArray:
+                # self.allBasisIndices = self.AutoVivification()
+                if str(deg) not in self.allBasisIndices:
+                    # Generate the polynomial basis indices
+                    for qidx, q in enumerate(self.q):
+                        self.allBasisIndices[str(deg)][str(q)] = self.PolyBasisIndices(degree=deg, q=q)
 
             
         # Evaluate the univariate polynomials on ExpDesign
diff --git a/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py b/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
index 8f7d7965a..ac097c783 100755
--- a/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
+++ b/BayesValidRox/tests/AnalyticalFunction/Test_AnalyticalFunction.py
@@ -93,7 +93,7 @@ if __name__ == "__main__":
     # error (or the highest score = 1-LOO) estimator is chosen as the final
     # metamodel.
     MetaModelOpts.MinPceDegree = 1 #12
-    MetaModelOpts.MaxPceDegree = 6 #12
+    MetaModelOpts.MaxPceDegree = 10 #12
     
     # q-quasi-norm 0<q<1 (default=1)
     MetaModelOpts.q = 1.0 if ndim<5 else 0.65
@@ -105,7 +105,7 @@ if __name__ == "__main__":
     # 5)FastARD: Fast Bayesian ARD Regression
     # 6)SGDR: Stochastic gradient descent learning
     # MetaModelOpts.metaModel = 'PCEKriging'
-    MetaModelOpts.RegMethod = 'ARD'
+    MetaModelOpts.RegMethod = 'FastARD'
     MetaModelOpts.DimRedMethod = 'PCA'
     
     # Print summary of the regression results
@@ -117,8 +117,8 @@ if __name__ == "__main__":
     MetaModelOpts.addExpDesign()
     
     # One-shot (normal) or Sequential Adaptive (sequential) Design
-    MetaModelOpts.ExpDesign.Method = 'normal'
-    NrofInitSamples = 100 #75
+    MetaModelOpts.ExpDesign.Method = 'sequential'
+    NrofInitSamples = 10 #75
     MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples
     
     # Sampling methods
@@ -128,7 +128,7 @@ if __name__ == "__main__":
     
     # Sequential experimental design (needed only for sequential ExpDesign)
     MetaModelOpts.ExpDesign.NrofNewSample = 1
-    MetaModelOpts.ExpDesign.MaxNSamples = 100 #150
+    MetaModelOpts.ExpDesign.MaxNSamples = 50 #150
     MetaModelOpts.ExpDesign.ModifiedLOOThreshold = 1e-16
     
     # Defining the measurement error, if it's known a priori
-- 
GitLab