diff --git a/src/bayesvalidrox/surrogate_models/meta_model_engine.py b/src/bayesvalidrox/surrogate_models/meta_model_engine.py
index 3985da8a3d4ff124428c9f8710b4d029ca03716c..2df2dee5390ae4e6dc7eb88343c2469dbd88aad6 100644
--- a/src/bayesvalidrox/surrogate_models/meta_model_engine.py
+++ b/src/bayesvalidrox/surrogate_models/meta_model_engine.py
@@ -19,7 +19,6 @@ import os
 import gc
 import seaborn as sns
 from joblib import Parallel, delayed
-import resource
 from .exploration import Exploration
 
 
@@ -230,7 +229,6 @@ class MetaModelEngine():
                     print(f'\n>>>> Iteration number {itr_no} <<<<')
 
                     # Save the metamodel prediction before updating
-                    m_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
                     prevMetaModel_dict[itr_no] = deepcopy(self.MetaModel)
                     if itr_no > 1:
                         pc_model = prevMetaModel_dict[itr_no-1]
@@ -243,7 +241,6 @@ class MetaModelEngine():
                     Xnew, updatedPrior = self.opt_SeqDesign(TotalSigma2,
                                                             n_canddidate,
                                                             util_f)
-                    m_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
                     S = np.min(distance.cdist(Xinit, Xnew, 'euclidean'))
                     self.MetaModel.seqMinDist.append(S)
                     print(f"\nmin Dist from OldExpDesign: {S:2f}")
@@ -298,7 +295,6 @@ class MetaModelEngine():
 
                     # Train the surrogate model for new ExpDesign
                     self.MetaModel.train_norm_design(parallel=False)
-                    m_3 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
 
                     # -------- Evaluate the retrained surrogate model -------
                     # Extract Modified LOO from Output
@@ -340,7 +336,6 @@ class MetaModelEngine():
                         if len(self.MetaModel.valid_model_runs) != 0:
                             SeqValidError = np.vstack(
                                 (SeqValidError, ValidError))
-                    m_4 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
                     # -------- Calculation of BME as accuracy metric -------
                     # Check if data is provided
                     if len(obs_data) != 0:
@@ -361,7 +356,6 @@ class MetaModelEngine():
                             self.__posteriorPlot(Posterior, parNames,
                                                  f'SeqPosterior_{postcnt}')
                         postcnt += 1
-                    m_5 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
 
                     # Check the convergence of the Mean&Std
                     if mc_ref and pce:
@@ -386,17 +380,9 @@ class MetaModelEngine():
                                    for LOO in ModifiedLOO):
                         break
 
-                    print(f"Memory itr {itr_no}: I: {m_2-m_1:.2f} MB")
-                    print(f"Memory itr {itr_no}: II: {m_3-m_2:.2f} MB")
-                    print(f"Memory itr {itr_no}: III: {m_4-m_3:.2f} MB")
-                    print(f"Memory itr {itr_no}: IV: {m_5-m_4:.2f} MB")
-                    m_6 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
-                    print(f"Memory itr {itr_no}: total: {m_6:.2f} MB")
-
                     # Clean up
                     if len(obs_data) != 0:
                         del out
-                    gc.collect()
                     print()
                     print('-'*50)
                     print()
@@ -610,7 +596,6 @@ class MetaModelEngine():
         del likelihoods
         del Y_MC
         del std_MC
-        gc.collect(generation=2)
 
         return -1 * U_J_d   # -1 is for minimization instead of maximization
 
@@ -909,7 +894,8 @@ class MetaModelEngine():
             logBME = np.log(np.nanmean(likelihoods))
 
             # Posterior-based expectation of likelihoods
-            postLikelihoods = likelihoods[accepted] / np.nansum(likelihoods[accepted])
+            postLikelihoods = likelihoods[accepted]
+            postLikelihoods /= np.nansum(likelihoods[accepted])
             postExpLikelihoods = np.mean(np.log(postLikelihoods))
 
             # Posterior-based expectation of prior densities
@@ -938,7 +924,6 @@ class MetaModelEngine():
         del likelihoods
         del Y_MC
         del std_MC
-        gc.collect(generation=2)
 
         return -1 * U_J_d   # -1 is for minimization instead of maximization
 
@@ -1907,10 +1892,7 @@ class MetaModelEngine():
                 )
 
             # Monte Carlo simulation for the candidate design
-            m_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
             Y_MC, std_MC = MetaModel.eval_metamodel(samples=X_MC)
-            m_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
-            print(f"\nMemory eval_metamodel in BME: {m_2-m_1:.2f} MB")
 
             # Likelihood computation (Comparison of data and
             # simulation results via PCE with candidate design)
@@ -2039,10 +2021,7 @@ class MetaModelEngine():
         valid_model_runs = MetaModel.valid_model_runs
 
         # Run the PCE model with the generated samples
-        m_1 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
         valid_PCE_runs, _ = MetaModel.eval_metamodel(samples=valid_samples)
-        m_2 = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss/1024
-        print(f"\nMemory eval_metamodel: {m_2-m_1:.2f} MB")
 
         rms_error = {}
         valid_error = {}
diff --git a/src/bayesvalidrox/surrogate_models/surrogate_models.py b/src/bayesvalidrox/surrogate_models/surrogate_models.py
index 009ba7105c0f3113148b244e7263db8d5538fb73..c51ea8c6679aaeec685da069154e8f460c7c4450 100644
--- a/src/bayesvalidrox/surrogate_models/surrogate_models.py
+++ b/src/bayesvalidrox/surrogate_models/surrogate_models.py
@@ -14,7 +14,6 @@ import sklearn.linear_model as lm
 from sklearn.gaussian_process import GaussianProcessRegressor
 import sklearn.gaussian_process.kernels as kernels
 import os
-import sys
 from joblib import Parallel, delayed
 import copy
 
@@ -25,7 +24,6 @@ from .reg_fast_ard import RegressionFastARD
 from .reg_fast_laplace import RegressionFastLaplace
 from .orthogonal_matching_pursuit import OrthogonalMatchingPursuit
 from .bayes_linear import VBLinearRegression, EBLinearRegression
-from .sequential_design import SeqDesign
 warnings.filterwarnings("ignore")
 # Load the mplstyle
 plt.style.use(os.path.join(os.path.split(__file__)[0],
@@ -65,7 +63,7 @@ class MetaModel():
         is `'fast'`. It means that in each iteration except the first one, only
         the coefficients are recalculated with the ordinary least squares method.
     n_bootstrap_itrs : int
-        Number of iterations for the bootstrap sampling. The default is `100`.
+        Number of iterations for the bootstrap sampling. The default is `1`.
     pce_deg : int or list of int
         Polynomial degree(s). If a list is given, an adaptive algorithm is used
         to find the best degree with the lowest Leave-One-Out cross-validation
@@ -102,7 +100,7 @@ class MetaModel():
 
     def __init__(self, input_obj, model_obj, meta_model_type='PCE',
                  pce_reg_method='OLS', bootstrap_method='fast',
-                 n_bootstrap_itrs=100, pce_deg=1, pce_q_norm=1.0,
+                 n_bootstrap_itrs=1, pce_deg=1, pce_q_norm=1.0,
                  dim_red_method='no', verbose=False):
 
         self.input_obj = input_obj
@@ -137,8 +135,9 @@ class MetaModel():
                 self.pce_deg = np.array(self.pce_deg)
 
         if self.ExpDesign.method == 'sequential':
-            self.train_norm_design(Model)
-            metamodel = SeqDesign().train_seq_design(self)
+            raise Exception(
+                "Please use MetaModelEngine class for the sequential design!"
+                )
 
         elif self.ExpDesign.method == 'normal':
             self.ExpDesignFlag = 'normal'
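
Beyond dropping the `resource`-based memory instrumentation and the explicit
`gc.collect()` calls, this patch makes two user-facing changes: `n_bootstrap_itrs`
now defaults to `1` instead of `100`, so callers who want the previous bootstrap
behaviour must request it explicitly, and a `MetaModel` whose experimental design
method is `'sequential'` no longer trains itself via `SeqDesign` but raises and
points to `MetaModelEngine`. A minimal usage sketch of how calling code might
adapt is given below; only the `MetaModel` signature and the `MetaModelEngine`
import path come from the diff, while the engine's constructor argument and its
`run()` entry point are assumptions to verify against the class.

    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
    from bayesvalidrox.surrogate_models.meta_model_engine import MetaModelEngine

    # 'inputs' and 'model' stand for already-configured bayesvalidrox input
    # and model objects (placeholders, not defined in this sketch).
    meta_model = MetaModel(inputs, model,
                           meta_model_type='PCE',
                           pce_reg_method='OLS',
                           n_bootstrap_itrs=100)  # old default; now 1 unless set

    # ... attach the experimental design with method='sequential' as before ...

    # Sequential training now goes through the engine, not MetaModel itself.
    engine = MetaModelEngine(meta_model)  # constructor signature assumed
    engine.run()                          # entry-point name assumed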