diff --git a/BayesValidRox/surrogate_models/ExpDesigns.py b/BayesValidRox/surrogate_models/ExpDesigns.py
index 4cb853f86e0cdc92b34cb5e238d6bd78da8a8d93..2475233b658383e9d21f026d825487527837f730 100644
--- a/BayesValidRox/surrogate_models/ExpDesigns.py
+++ b/BayesValidRox/surrogate_models/ExpDesigns.py
@@ -79,6 +79,9 @@ class ExpDesigns:
         if len(Inputs.Marginals[0].InputValues) == 0 and self.SamplingMethod != 'user':
             ## Case I = polytype not arbitrary
             
+            # Run the parameter initialization to obtain the input distributions and the bound tuples
+            self.Input_distributions, self.BoundTuples = self.Parameter_Initialization()
+            
             # Create ExpDesign in the actual space
             X = chaospy.generate_samples(NrSamples, domain=self.JDist , rule=SamplingMethod).T
         
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index c89d06f327c9506c718ee393844db1cc28b4ab2f..ad1b851c7851212855cb7abbd356eb83b5487192 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -110,7 +110,6 @@ class aPCE:
         
         self.PolynomialDegrees = Combos
         
-        
         return self.PolynomialDegrees
     
     #--------------------------------------------------------------------------------------------------------
@@ -507,12 +506,12 @@ class aPCE:
         return PolynomialDegrees_Sparse,PSI_Sparse,coeffs, clf_poly
     
     #--------------------------------------------------------------------------------------------------------
-    def adaptiveSparseaPCE(self, CollocationPoints, ModelOutput):
+    def adaptiveSparseaPCE(self, CollocationPoints, ModelOutput, varIdx):
         
         
         # Initialization
         self.DegreeArray = np.array(range(self.MinPceDegree,self.MaxPceDegree+1))
-        q = self.q 
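+        # Cast the q-norm option to a 1D array so that a single value as well as
+        # a list of q values can be iterated over below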
+        qnorm = np.atleast_1d(self.q)
         AllCoeffs = {}
         AllIndices_Sparse = {}
         Allclf_poly = {}
@@ -522,57 +521,70 @@ class aPCE:
         # methods
         bestScore = -np.inf
         
+        # Some options for EarlyStop
+        errorIncreases = False  # degree-level early-stop flag (set in the loop below)
+        # stop degree, if LOO error does not decrease n_checks_degree times
+        n_checks_degree = 2
+        # stop qNorm, if criterion isn't fulfilled n_checks_qNorm times
+        n_checks_qNorm = 2
+        nqnorms = len(qnorm)
+        # the q-norm early stop needs at least n_checks_qNorm+1 candidate q-norms
+        qNormEarlyStop = True
+        if nqnorms < n_checks_qNorm+1:
+            qNormEarlyStop = False
+        
         #==============================================================================
         #==== basis adaptive polynomial chaos: repeat the calculation by
         #==== increasing polynomial degree until the target accuracy is reached
         #==============================================================================
         # For each degree check all q-norms and choose the best one
         scores = -np.inf*np.ones(self.DegreeArray.shape[0])
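+        # LOO scores obtained for each candidate q-norm at the current degree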
+        qNormScores = -np.inf*np.ones(nqnorms)
         scores_BRR = -np.inf*np.ones(self.DegreeArray.shape[0])
         
         for degIdx, deg in enumerate(self.DegreeArray):
             
-            # Generate the polynomial basis indices 
-            #print('Generating the polynomial basis indices...\n')
-            
-            BasisIndices = self.PolyBasisIndices(degree = deg, q=q)
-            
-            #print('Polynomial basis indices degree %s:\n'%deg, BasisIndices)
-            
-            # Evaluate the univariate polynomials on ExpDesig
-            #print('Evaluating the univariate polynomials on the experimental design...\n')
-            
-            univ_p_val = self.univ_basis_vals(CollocationPoints)
-            
-            #print('Evaluation of the univariate polynomials complete!\n\n')
-            
-            
-            # ---- Calulate the cofficients of aPCE ----------------------
-            #print('Calculating the coefficients with the Sparse Bayesian Learning algorithm...\n')
-            Psi = self.PCE_create_Psi(BasisIndices,univ_p_val)
-            
-            PolynomialDegrees_Sparse, PSI_Sparse, Coeffs, clf_poly = self.RS_Builder(Psi, ModelOutput,BasisIndices)
-            AllCoeffs[str(degIdx+1)] = Coeffs
-            AllIndices_Sparse[str(degIdx+1)] = PolynomialDegrees_Sparse
-            Allclf_poly[str(degIdx+1)] = clf_poly
-            AllnTerms[str(degIdx+1)] = BasisIndices.shape[0]
-            
-            # Calculate and save the score of LOOCV
-            score = self.CorrectedLeaveoutError(PSI_Sparse, Coeffs, ModelOutput, clf_poly)
-            
+            for qidx, q in enumerate(qnorm):
+                # Generate the polynomial basis indices
+                BasisIndices = self.PolyBasisIndices(degree=deg, q=q)
+
+                # Evaluate the univariate polynomials on the experimental design
+                univ_p_val = self.univ_basis_vals(CollocationPoints)
+
+                # Assemble the Psi matrix
+                Psi = self.PCE_create_Psi(BasisIndices, univ_p_val)
+
+                # ---- Calculate the coefficients of aPCE ----------------------
+                PolynomialDegrees_Sparse, PSI_Sparse, Coeffs, clf_poly = self.RS_Builder(Psi, ModelOutput, BasisIndices)
+                AllCoeffs[str(degIdx+1)] = Coeffs
+                AllIndices_Sparse[str(degIdx+1)] = PolynomialDegrees_Sparse
+                Allclf_poly[str(degIdx+1)] = clf_poly
+                AllnTerms[str(degIdx+1)] = BasisIndices.shape[0]
+                
+                # Calculate and save the score of LOOCV
+                score = self.CorrectedLeaveoutError(PSI_Sparse, Coeffs, ModelOutput, clf_poly)
+                
+                qNormScores[qidx] = score
+                # EarlyStop check
+                # if there are at least n_checks_qNorm entries after the
+                # best one, we stop
+                if qNormEarlyStop and sum(np.isfinite(qNormScores)) > n_checks_qNorm:
+                    # If the error has increased the last two iterations, stop
+                    deltas = np.sign(np.diff(qNormScores[np.isfinite(qNormScores)]))
+                    if sum(deltas[-n_checks_qNorm+1:]) == 2:
+                        # stop the q-norm loop here
+                        break
+                
             # check the direction of the error (on average):
             # if it increases consistently stop the iterations
-#            n_checks_degree = 2
-#            if len(scores[scores!=-np.inf]) > n_checks_degree:
-#                ss = np.sign(score - np.max(scores[scores!=-np.inf])) #ss<0 error decreasing
-#                print("ss:", ss)
-#                errorIncreases = np.sum(np.sum(ss[-2:])) <= -1*n_checks_degree
-#                
-#            if errorIncreases:
-#                break
+            if len(scores[scores!=-np.inf]) > n_checks_degree:
+                ss = np.sign(scores[scores!=-np.inf] - np.max(scores[scores!=-np.inf]))  # ss < 0: error increasing
+                errorIncreases = np.sum(ss[-2:]) <= -1*n_checks_degree
+
+            if errorIncreases:
+                break
             
             # Store the score in the scores list 
-            scores[degIdx] = score
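+            # For this degree, keep the best LOO score found over all tested q-norms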
+            scores[degIdx] = np.max(qNormScores)
             
             
         # ------------------ Summary of results ------------------
@@ -584,6 +596,13 @@ class aPCE:
         LOOCVScore = np.nanmax(scores)
         P = AllnTerms[str(bestIdx)]
         
+        
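+        # Report the converged polynomial degree, q-norm and modified-LOO error
+        # estimate for this output variable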
+        print('The estimation of PCE coefficients converged at polynomial degree '
+              '%s and qNorm %s for output variable %s.' % (self.DegreeArray[bestIdx-1],
+                                                           q, varIdx+1))
+        print('Final ModLOO error estimate: %s' % (1-np.nanmax(scores)))
+        print('\n'+'-'*50)
+        
         # ------------------ Summary of results ------------------
         if self.DisplayFlag == True:
             print('='*50)
@@ -764,7 +783,7 @@ class aPCE:
         if 'x_values' in OutputDict: del OutputDict['x_values']
 
         for key , OutputMatrix in OutputDict.items():
-            if not OptDesignFlag: print(">>>> Polynomial Coefficient of %s <<<<<<"%key)
+            if not OptDesignFlag: print(">>>> Calculating the PCE coefficients of %s with %s <<<<<<" % (key, self.RegMethod))
             for idx in range(OutputMatrix.shape[1]):
                 
                 # Only update the coeffs in the searching alg. of SeqExpDesign
@@ -772,7 +791,7 @@ class aPCE:
                     BasisIndices = self.BasisDict[key]["y_"+str(idx+1)]
                     self.CoeffsDict[key]["y_"+str(idx+1)] , self.BasisDict[key]["y_"+str(idx+1)], self.ScoresDict[key]["y_"+str(idx+1)], self.clf_poly[key]["y_"+str(idx+1)] = self.CoeffsUpdateaPCE(CollocationPoints, OutputMatrix[:,idx], BasisIndices)
                 else:
-                    self.CoeffsDict[key]["y_"+str(idx+1)] , self.BasisDict[key]["y_"+str(idx+1)], self.ScoresDict[key]["y_"+str(idx+1)], self.clf_poly[key]["y_"+str(idx+1)] = self.adaptiveSparseaPCE(CollocationPoints, OutputMatrix[:,idx])
+                    self.CoeffsDict[key]["y_"+str(idx+1)] , self.BasisDict[key]["y_"+str(idx+1)], self.ScoresDict[key]["y_"+str(idx+1)], self.clf_poly[key]["y_"+str(idx+1)] = self.adaptiveSparseaPCE(CollocationPoints, OutputMatrix[:,idx], idx)
 
         
         return self
@@ -1829,9 +1848,9 @@ class aPCE:
         ObservationData = self.Observations
         
         # Check if data is provided
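+        # Default to an empty array so TotalSigma2 is always defined,
+        # even when no observation data is provided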
+        TotalSigma2 = np.empty((0,1))
         if len(ObservationData) != 0:
             # ---------- Prepare diagonal enteries for co-variance matrix ----------
-            TotalSigma2 = np.empty((0,1))
             for keyIdx, key in enumerate(Model.Output.Names):
                 optSigma = 'B'
                 sigma2 = np.array(self.Discrepancy.Parameters[key])
diff --git a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
index 7b0386265f08c3d7d10b467c359ca53460cc175d..abbd2353fc8c30e1cc0188e83a97cba30a87dd58 100644
--- a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
+++ b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py
@@ -97,7 +97,7 @@ if __name__ == "__main__":
     # 1)OLS: Ordinary Least Square  2)BRR: Bayesian Ridge Regression 
     # 3)LARS: Least angle regression  4)ARD: Bayesian ARD Regression
     
-    MetaModelOpts.RegMethod = 'BRR'
+    MetaModelOpts.RegMethod = 'OLS'
     
     # Print summary of the regression results
     #MetaModelOpts.DisplayFlag = True
@@ -109,7 +109,7 @@ if __name__ == "__main__":
     
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.Method = 'sequential'
-    NrofInitSamples = 20 #20
+    NrofInitSamples = 20
     MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples
     
     # Sampling methods
@@ -149,21 +149,21 @@ if __name__ == "__main__":
     MetaModelOpts.ExpDesign.MaxFunItr = 200
     
     # Use when 'Voronoi' or 'MC' or 'LHS' chosen
-    MetaModelOpts.ExpDesign.NCandidate = 1000
+    MetaModelOpts.ExpDesign.NCandidate = 10
     MetaModelOpts.ExpDesign.NrofCandGroups = 4
     
     # -------- Exploitation ------
     # 1)'BayesOptDesign' 2)'VarOptDesign' 3)'alphabetic' 4)'Space-filling'
-    MetaModelOpts.ExpDesign.ExploitMethod = 'VarOptDesign'
+    MetaModelOpts.ExpDesign.ExploitMethod = 'BayesOptDesign'
     
     # BayesOptDesign -> when data is available
     # 1)DKL (Kullback-Leibler Divergence) 2)DPP (D-Posterior-percision)
     # 3)APP (A-Posterior-percision) 
-    #MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP']
+    MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP']
     
     # VarBasedOptDesign -> when data is not available
     # Only with Vornoi >>> 1)Entropy 2)EIGF, 3)ALM, 4)LOOCV
-    MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy'#['EIGF', 'Entropy', 'LOOCV']
+    #MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy'#['EIGF', 'Entropy', 'LOOCV']
     
     # alphabetic
     # 1)D-Opt (D-Optimality) 2)A-Opt (A-Optimality)