From 8e4d65b9406c308d9c50ba3d1487744a9f86022c Mon Sep 17 00:00:00 2001
From: kohlhaasrebecca <rebecca.kohlhaas@outlook.com>
Date: Tue, 19 Dec 2023 12:08:55 +0100
Subject: [PATCH] Static training, postprocessing, and inference using the Engine

Update the code for static training, postprocessing, and inference via the Engine class; tests are still to be written.
---
 .coverage                                     | Bin 53248 -> 53248 bytes
 .../test_analytical_function.py               |  27 +-
 .../bayes_inference/bayes_inference.py        |  47 +-
 src/bayesvalidrox/bayes_inference/mcmc.py     |   6 +-
 .../post_processing/post_processing.py        |  10 +-
 src/bayesvalidrox/surrogate_models/engine.py  | 106 +---
 .../surrogate_models/exp_designs.py           |  32 ++
 .../surrogate_models/exp_designs_.py          | 508 ------------------
 .../surrogate_models/input_space.py           |   5 +-
 .../surrogate_models/surrogate_models.py      | 126 ++---
 10 files changed, 134 insertions(+), 733 deletions(-)
 delete mode 100644 src/bayesvalidrox/surrogate_models/exp_designs_.py

diff --git a/.coverage b/.coverage
index fdfc90cd88247a2682ddc7eb5c6dc7e6e44746ed..dfaacbba19de8851a6f5b72f5384aaa580e686ae 100644
GIT binary patch
(binary deltas omitted: opaque update to the committed .coverage file)

diff --git a/examples/analytical-function/test_analytical_function.py b/examples/analytical-function/test_analytical_function.py
index e356d37da..65a71676a 100644
--- a/examples/analytical-function/test_analytical_function.py
+++ b/examples/analytical-function/test_analytical_function.py
@@ -151,23 +151,11 @@ if __name__ == "__main__":
     # Provide the experimental design object with a hdf5 file
     # MetaModelOpts.ExpDesign.hdf5_file = 'ExpDesign_AnalyticFunc.hdf5'
     
-    if 0:
-        # Train the meta model with the old engine
-        MetaModelOpts.ExpDesign = ExpDesign
-        meta_model_engine = MetaModelEngine(MetaModelOpts)
-        meta_model_engine.run()
-        PCEModel = meta_model_engine.MetaModel
     
-    if 1: 
-        # Test the new engine
-        engine = Engine(MetaModelOpts, Model, ExpDesign)
-        engine.start_engine()
-        engine.train_normal()
-        PCEModel = engine.MetaModel
-
-    # Save PCE models
-    with open(f'PCEModel_{Model.name}.pkl', 'wb') as output:
-        joblib.dump(PCEModel, output, 2)
+    # Run using the new engine
+    engine = Engine(MetaModelOpts, Model, ExpDesign)
+    engine.start_engine()
+    engine.train_normal()
 
     # Load the objects
     # with open(f"PCEModel_{Model.name}.pkl", "rb") as input:
@@ -176,7 +164,7 @@ if __name__ == "__main__":
     # =====================================================
     # =========  POST PROCESSING OF METAMODELS  ===========
     # =====================================================
-    PostPCE = PostProcessing(PCEModel, Model)
+    PostPCE = PostProcessing(engine)
 
     # Plot to check validation visually.
     PostPCE.valid_metamodel(n_samples=1)
@@ -200,7 +188,7 @@ if __name__ == "__main__":
     # =====================================================
     # ========  Bayesian inference with Emulator ==========
     # =====================================================
-    BayesOpts = BayesInference(PCEModel)
+    BayesOpts = BayesInference(engine)
     BayesOpts.emulator = True
     BayesOpts.plot_post_pred = True
 
@@ -218,7 +206,7 @@ if __name__ == "__main__":
     BayesOpts.inference_method = "MCMC"
     # Set the MCMC parameters passed to self.mcmc_params
     BayesOpts.mcmc_params = {
-        'n_steps': 1e5,
+        'n_steps': 1e4,  # reduced from 1e5 to keep the example fast
         'n_walkers': 30,
         'moves': emcee.moves.KDEMove(),
         'multiprocessing': False,
@@ -226,6 +214,7 @@ if __name__ == "__main__":
         }
 
     # ----- Define the discrepancy model -------
+    obsData = pd.DataFrame(Model.observations, columns=Model.Output.names)
     BayesOpts.measurement_error = obsData
 
     # # -- (Option B) --
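Since the rewritten block no longer pickles the trained model, scripts that relied on the PCEModel_*.pkl file must now save it themselves. A minimal sketch of the updated workflow, assuming MetaModelOpts, Model, and ExpDesign are set up as earlier in this script:

    # Train with the new Engine (replaces the old MetaModelEngine.run() path)
    engine = Engine(MetaModelOpts, Model, ExpDesign)
    engine.start_engine()
    engine.train_normal()

    # Persist the trained surrogate manually, mirroring the removed lines
    import joblib
    with open(f'PCEModel_{Model.name}.pkl', 'wb') as output:
        joblib.dump(engine.MetaModel, output, 2)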
diff --git a/src/bayesvalidrox/bayes_inference/bayes_inference.py b/src/bayesvalidrox/bayes_inference/bayes_inference.py
index c8073ba76..b9d0631bd 100644
--- a/src/bayesvalidrox/bayes_inference/bayes_inference.py
+++ b/src/bayesvalidrox/bayes_inference/bayes_inference.py
@@ -125,7 +125,7 @@ class BayesInference:
 
     """
 
-    def __init__(self, MetaModel, discrepancy=None, emulator=True,
+    def __init__(self, engine, MetaModel=None, discrepancy=None, emulator=True,
                  name='Calib', bootstrap=False, req_outputs=None,
                  selected_indices=None, samples=None, n_samples=100000,
                  measured_data=None, inference_method='rejection',
@@ -135,7 +135,8 @@ class BayesInference:
                  plot_map_pred=False, max_a_posteriori='mean',
                  corner_title_fmt='.2e'):
 
-        self.MetaModel = MetaModel
+        self.engine = engine
+        self.MetaModel = MetaModel if MetaModel is not None else engine.MetaModel
         self.Discrepancy = discrepancy
         self.emulator = emulator
         self.name = name
@@ -172,14 +173,14 @@ class BayesInference:
 
         # Set some variables
         MetaModel = self.MetaModel
-        Model = MetaModel.ModelObj
+        Model = self.engine.Model
         n_params = MetaModel.n_params
         output_names = Model.Output.names
-        par_names = MetaModel.ExpDesign.par_names
+        par_names = self.engine.ExpDesign.par_names
 
         # If the prior is set by the user, take it.
         if self.samples is None:
-            self.samples = MetaModel.ExpDesign.generate_samples(
+            self.samples = self.engine.ExpDesign.generate_samples(
                 self.n_samples, 'random')
         else:
             try:
@@ -405,7 +406,7 @@ class BayesInference:
                     with multiprocessing.Pool(n_thread) as p:
                         postExpPrior = np.mean(np.concatenate(
                             p.map(
-                                MetaModel.ExpDesign.JDist.pdf,
+                                self.engine.ExpDesign.JDist.pdf,
                                 np.array_split(posterior.T, n_thread, axis=1))
                             )
                             )
@@ -522,10 +523,10 @@ class BayesInference:
                 )
             for yi in range(len(par_names)):
                 ax = axes[yi, yi]
-                ax.set_xlim(MetaModel.bound_tuples[yi])
+                ax.set_xlim(self.engine.ExpDesign.bound_tuples[yi])
                 for xi in range(yi):
                     ax = axes[yi, xi]
-                    ax.set_xlim(MetaModel.bound_tuples[xi])
+                    ax.set_xlim(self.engine.ExpDesign.bound_tuples[xi])
         plt.close()
 
         # Turn off gridlines
@@ -690,10 +691,10 @@ class BayesInference:
 
         """
         MetaModel = self.MetaModel
-        Model = MetaModel.ModelObj
+        Model = self.engine.Model
 
         if samples is None:
-            self.samples = MetaModel.ExpDesign.generate_samples(
+            self.samples = self.engine.ExpDesign.generate_samples(
                 self.n_samples, 'random')
         else:
             self.samples = samples
@@ -793,7 +794,7 @@ class BayesInference:
             Likelihoods.
 
         """
-        Model = self.MetaModel.ModelObj
+        Model = self.engine.Model
         logLik = 0.0
 
         # Extract the requested model outputs for likelihood calculation
@@ -891,8 +892,8 @@ class BayesInference:
         Calculates the correction factor for BMEs.
         """
         MetaModel = self.MetaModel
-        OrigModelOutput = MetaModel.ExpDesign.Y
-        Model = MetaModel.ModelObj
+        OrigModelOutput = self.engine.ExpDesign.Y
+        Model = self.engine.Model
 
         # Posterior with Gaussian likelihood
         postDist = stats.gaussian_kde(posterior.T)
@@ -918,7 +919,7 @@ class BayesInference:
 
         NrofBayesSamples = self.n_samples
         # Evaluate MetaModel on the experimental design
-        Samples = MetaModel.ExpDesign.X
+        Samples = self.engine.ExpDesign.X
         OutputRS, stdOutputRS = MetaModel.eval_metamodel(samples=Samples)
 
         # Reset the NrofSamples to NrofBayesSamples
@@ -945,9 +946,9 @@ class BayesInference:
         Calculates the correction factor for BMEs.
         """
         MetaModel = self.MetaModel
-        samples = MetaModel.ExpDesign.X
-        model_outputs = MetaModel.ExpDesign.Y
-        Model = MetaModel.ModelObj
+        samples = self.engine.ExpDesign.X
+        model_outputs = self.engine.ExpDesign.Y
+        Model = self.engine.Model
         n_samples = samples.shape[0]
 
         # Extract the requested model outputs for likelihood calculation
@@ -1061,7 +1062,7 @@ class BayesInference:
         accepted_samples = samples[norm_ikelihoods >= unif]
 
         # Output the Posterior
-        par_names = MetaModel.ExpDesign.par_names
+        par_names = self.engine.ExpDesign.par_names
         if sigma2_prior is not None:
             for name in self.Discrepancy.name:
                 par_names.append(name)
@@ -1087,7 +1088,7 @@ class BayesInference:
         """
 
         MetaModel = self.MetaModel
-        Model = MetaModel.ModelObj
+        Model = self.engine.Model
 
         # Make a directory to save the prior/posterior predictive
         out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
@@ -1101,7 +1102,7 @@ class BayesInference:
             self.measured_data = pd.DataFrame(self.measured_data)
 
         # X_values
-        x_values = MetaModel.ExpDesign.x_values
+        x_values = self.engine.ExpDesign.x_values
 
         try:
             sigma2_prior = self.Discrepancy.sigma2_prior
@@ -1276,7 +1277,7 @@ class BayesInference:
         """
 
         MetaModel = self.MetaModel
-        Model = MetaModel.ModelObj
+        Model = self.engine.Model
         out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
         opt_sigma = self.Discrepancy.opt_sigma
 
@@ -1375,7 +1376,7 @@ class BayesInference:
 
         """
 
-        Model = self.MetaModel.ModelObj
+        Model = self.engine.Model
         out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
         # Plot the posterior predictive
         for out_idx, out_name in enumerate(Model.Output.names):
@@ -1490,7 +1491,7 @@ class BayesInference:
                         'ko', label='data', markeredgecolor='w')
 
                     # --- Plot ExpDesign ---
-                    orig_ED_Y = self.MetaModel.ExpDesign.Y[out_name]
+                    orig_ED_Y = self.engine.ExpDesign.Y[out_name]
                     for output in orig_ED_Y:
                         plt.plot(
                             x_coords, output, color='grey', alpha=0.15
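Callers of BayesInference thus switch from passing a bare MetaModel to passing a trained Engine, from which MetaModel, Model, and ExpDesign are unpacked internally. A sketch of the calibration setup, assuming a trained engine as above and the package's usual create_inference() entry point (untouched by this patch):

    BayesOpts = BayesInference(engine)
    BayesOpts.emulator = True          # evaluate the surrogate, not the model
    BayesOpts.inference_method = 'MCMC'
    BayesOpts.mcmc_params = {
        'n_steps': 1e4,
        'n_walkers': 30,
        }
    Bayes = BayesOpts.create_inference()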
diff --git a/src/bayesvalidrox/bayes_inference/mcmc.py b/src/bayesvalidrox/bayes_inference/mcmc.py
index 6b9ca9482..026b4a3f9 100755
--- a/src/bayesvalidrox/bayes_inference/mcmc.py
+++ b/src/bayesvalidrox/bayes_inference/mcmc.py
@@ -47,11 +47,11 @@ class MCMC:
     def run_sampler(self, observation, total_sigma2):
 
         BayesObj = self.BayesOpts
-        MetaModel = BayesObj.MetaModel
-        Model = MetaModel.ModelObj
+        MetaModel = BayesObj.engine.MetaModel
+        Model = BayesObj.engine.Model
         Discrepancy = self.BayesOpts.Discrepancy
         n_cpus = Model.n_cpus
-        priorDist = MetaModel.ExpDesign.JDist
+        priorDist = BayesObj.engine.ExpDesign.JDist
         ndim = MetaModel.n_params
         self.counter = 0
         output_dir = f'Outputs_Bayes_{Model.name}_{self.BayesOpts.name}'
diff --git a/src/bayesvalidrox/post_processing/post_processing.py b/src/bayesvalidrox/post_processing/post_processing.py
index 0b77fa380..27027ba07 100644
--- a/src/bayesvalidrox/post_processing/post_processing.py
+++ b/src/bayesvalidrox/post_processing/post_processing.py
@@ -32,9 +32,10 @@ class PostProcessing:
         expected to be performed change this to `'valid'`.
     """
 
-    def __init__(self, MetaModel, Model, name='calib'):
-        self.MetaModel = MetaModel
-        self.ModelObj = Model
+    def __init__(self, engine, name='calib'):
+        self.MetaModel = engine.MetaModel
+        self.ExpDesign = engine.ExpDesign
+        self.ModelObj = engine.Model
         self.name = name
 
     # -------------------------------------------------------------------------
@@ -1150,8 +1151,7 @@ class PostProcessing:
         """
         if n_samples is None:
             n_samples = self.n_samples
-        MetaModel = self.MetaModel
-        self.samples = MetaModel.ExpDesign.generate_samples(
+        self.samples = self.ExpDesign.generate_samples(
             n_samples,
             sampling_method='random')
         return self.samples
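PostProcessing mirrors the same refactoring: the constructor now unpacks MetaModel, ExpDesign, and Model from the engine. Usage, assuming a trained engine:

    PostPCE = PostProcessing(engine)
    PostPCE.valid_metamodel(n_samples=1)  # visual validation, as in the example script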
diff --git a/src/bayesvalidrox/surrogate_models/engine.py b/src/bayesvalidrox/surrogate_models/engine.py
index 7b3adc041..619fbbca5 100644
--- a/src/bayesvalidrox/surrogate_models/engine.py
+++ b/src/bayesvalidrox/surrogate_models/engine.py
@@ -13,7 +13,6 @@ import os
 
 from .inputs import Input
 from .exp_designs import ExpDesigns
-from .exp_designs_ import ExpDesigns_
 from .surrogate_models import MetaModel
 from bayesvalidrox.post_processing.post_processing import PostProcessing
 
@@ -96,6 +95,8 @@ class Engine():
         
         # TODO: this just a fix for the evals - remove later
         MetaModel.ExpDesign = ExpDesign
+        MetaModel.InputSpace = ExpDesign
+        MetaModel.ModelObj = self.Model
         
         # Save what there is to save
         if save:
@@ -159,108 +160,7 @@ class Engine():
                 sampling_method
                 )
 
-        # Transform samples to the independent space
-        samples = self.ExpDesign.transform(
-            samples,
-            method='user'
-            )
-        # print(samples)
-
-        # Compute univariate bases for the given samples
-        if self.meta_model_type.lower() != 'gpe':
-            univ_p_val = self.univ_basis_vals(
-                samples,
-                n_max=np.max(self.pce_deg)
-                )
-
-        mean_pred_b = {}
-        std_pred_b = {}
-        # Loop over bootstrap iterations
-        for b_i in range(self.n_bootstrap_itrs):
-
-            # Extract model dictionary
-            if self.meta_model_type.lower() == 'gpe':
-                model_dict = self.gp_poly[f'b_{b_i+1}']
-            else:
-                model_dict = self.coeffs_dict[f'b_{b_i+1}']
-
-            # Loop over outputs
-            mean_pred = {}
-            std_pred = {}
-            for output, values in model_dict.items():
-
-                mean = np.empty((len(samples), len(values)))
-                std = np.empty((len(samples), len(values)))
-                idx = 0
-                for in_key, InIdxValues in values.items():
-
-                    # Perdiction with GPE
-                    if self.meta_model_type.lower() == 'gpe':
-                        X_T = self.x_scaler[f'b_{b_i+1}'][output].transform(samples)
-                        gp = self.gp_poly[f'b_{b_i+1}'][output][in_key]
-                        y_mean, y_std = gp.predict(X_T, return_std=True)
-
-                    else:
-                        # Perdiction with PCE
-                        # Assemble Psi matrix
-                        basis = self.basis_dict[f'b_{b_i+1}'][output][in_key]
-                        psi = self.create_psi(basis, univ_p_val)
-
-                        # Perdiction
-                        if self.bootstrap_method != 'fast' or b_i == 0:
-                            # with error bar, i.e. use clf_poly
-                            clf_poly = self.clf_poly[f'b_{b_i+1}'][output][in_key]
-                            try:
-                                y_mean, y_std = clf_poly.predict(
-                                    psi, return_std=True
-                                    )
-                            except TypeError:
-                                y_mean = clf_poly.predict(psi)
-                                y_std = np.zeros_like(y_mean)
-                        else:
-                            # without error bar
-                            coeffs = self.coeffs_dict[f'b_{b_i+1}'][output][in_key]
-                            y_mean = np.dot(psi, coeffs)
-                            y_std = np.zeros_like(y_mean)
-
-                    mean[:, idx] = y_mean
-                    std[:, idx] = y_std
-                    idx += 1
-
-                # Save predictions for each output
-                if self.dim_red_method.lower() == 'pca':
-                    PCA = self.pca[f'b_{b_i+1}'][output]
-                    mean_pred[output] = PCA.inverse_transform(mean)
-                    std_pred[output] = np.zeros(mean.shape)
-                else:
-                    mean_pred[output] = mean
-                    std_pred[output] = std
-
-            # Save predictions for each bootstrap iteration
-            mean_pred_b[b_i] = mean_pred
-            std_pred_b[b_i] = std_pred
-
-        # Change the order of nesting
-        mean_pred_all = {}
-        for i in sorted(mean_pred_b):
-            for k, v in mean_pred_b[i].items():
-                if k not in mean_pred_all:
-                    mean_pred_all[k] = [None] * len(mean_pred_b)
-                mean_pred_all[k][i] = v
-
-        # Compute the moments of predictions over the predictions
-        for output in self.out_names:
-            # Only use bootstraps with finite values
-            finite_rows = np.isfinite(
-                mean_pred_all[output]).all(axis=2).all(axis=1)
-            outs = np.asarray(mean_pred_all[output])[finite_rows]
-            # Compute mean
-            mean_pred[output] = np.mean(outs, axis=0)
-            # Compute standard deviation
-            if self.n_bootstrap_itrs > 1:
-                std_pred[output] = np.std(outs, axis=0)
-            else:
-                std_pred[output] = std_pred_b[b_i][output]
+        mean_pred, std_pred = self.MetaModel.eval_metamodel(samples)
 
         if return_samples:
             return mean_pred, std_pred, samples
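With the bootstrap and PCA logic moved into MetaModel, the engine-side method reduces to sample generation plus delegation. Reconstructed from the surviving context lines, it now reads roughly as follows (a sketch, not the verbatim result):

    def eval_metamodel(self, samples=None, nsamples=None,
                       sampling_method='random', return_samples=False):
        # Draw evaluation points from the experimental design if none are given
        if samples is None:
            samples = self.ExpDesign.generate_samples(nsamples, sampling_method)
        # Delegate the actual prediction to the surrogate
        mean_pred, std_pred = self.MetaModel.eval_metamodel(samples)
        if return_samples:
            return mean_pred, std_pred, samples
        return mean_pred, std_pred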
diff --git a/src/bayesvalidrox/surrogate_models/exp_designs.py b/src/bayesvalidrox/surrogate_models/exp_designs.py
index 1036dbbba..76b5afe78 100644
--- a/src/bayesvalidrox/surrogate_models/exp_designs.py
+++ b/src/bayesvalidrox/surrogate_models/exp_designs.py
@@ -293,6 +293,38 @@ class ExpDesigns:
                 up_bound = np.max(Inputs.Marginals[i].input_data)
                 Inputs.Marginals[i].parameters = [low_bound, up_bound]
 
+    # -------------------------------------------------------------------------
+    def generate_samples(self, n_samples, sampling_method='random',
+                         transform=False):
+        """
+        Generates samples with the given sampling method.
+
+        Parameters
+        ----------
+        n_samples : int
+            Number of requested samples.
+        sampling_method : str, optional
+            Sampling method. The default is `'random'`.
+        transform : bool, optional
+            Transformation via an isoprobabilistic transformation method. The
+            default is `False`.
+
+        Returns
+        -------
+        samples : array of shape (n_samples, n_params)
+            Generated samples from the defined model input object.
+
+        """
+        try:
+            samples = chaospy.generate_samples(
+                int(n_samples), domain=self.origJDist, rule=sampling_method
+                )
+        except Exception:
+            # Fall back to the internal random sampler if chaospy cannot
+            # generate samples for the requested rule or distribution.
+            samples = self.random_sampler(int(n_samples)).T
+
+        return samples.T
             
     # -------------------------------------------------------------------------
     def generate_ED(self, n_samples, transform=False,
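Note that the new generate_samples returns samples transposed relative to chaospy, i.e. with shape (n_samples, n_params). A usage sketch, assuming an initialized ExpDesigns instance so that either origJDist or the random_sampler fallback is available:

    # exp_design: an initialized ExpDesigns instance
    samples = exp_design.generate_samples(1000, sampling_method='random')
    assert samples.shape[0] == 1000  # rows are samples after the final .T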
diff --git a/src/bayesvalidrox/surrogate_models/exp_designs_.py b/src/bayesvalidrox/surrogate_models/exp_designs_.py
deleted file mode 100644
index 36146225d..000000000
--- a/src/bayesvalidrox/surrogate_models/exp_designs_.py
+++ /dev/null
@@ -1,508 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-import numpy as np
-import math
-import itertools
-import chaospy
-import scipy.stats as st
-from tqdm import tqdm
-import h5py
-import os
-
-from .apoly_construction import apoly_construction
-
-
-class ExpDesigns_:
-    """
-    This class generates samples from the prescribed marginals for the model
-    parameters using the `Input` object.
-
-    Attributes
-    ----------
-    Input : obj
-        Input object containing the parameter marginals, i.e. name,
-        distribution type and distribution parameters or available raw data.
-    method : str
-        Type of the experimental design. The default is `'normal'`. Other
-        option is `'sequential'`.
-    meta_Model_type : str
-        Type of the meta_Model_type.
-    sampling_method : str
-        Name of the sampling method for the experimental design. The following
-        sampling method are supported:
-
-        * random
-        * latin_hypercube
-        * sobol
-        * halton
-        * hammersley
-        * chebyshev(FT)
-        * grid(FT)
-        * user
-    hdf5_file : str
-        Name of the hdf5 file that contains the experimental design.
-    n_new_samples : int
-        Number of (initial) training points.
-    n_max_samples : int
-        Number of maximum training points.
-    mod_LOO_threshold : float
-        The modified leave-one-out cross validation threshold where the
-        sequential design stops.
-    tradeoff_scheme : str
-        Trade-off scheme to assign weights to the exploration and exploitation
-        scores in the sequential design.
-    n_canddidate : int
-        Number of candidate training sets to calculate the scores for.
-    explore_method : str
-        Type of the exploration method for the sequential design. The following
-        methods are supported:
-
-        * Voronoi
-        * random
-        * latin_hypercube
-        * LOOCV
-        * dual annealing
-    exploit_method : str
-        Type of the exploitation method for the sequential design. The
-        following methods are supported:
-
-        * BayesOptDesign
-        * BayesActDesign
-        * VarOptDesign
-        * alphabetic
-        * Space-filling
-    util_func : str or list
-        The utility function to be specified for the `exploit_method`. For the
-        available utility functions see Note section.
-    n_cand_groups : int
-        Number of candidate groups. Each group of candidate training sets will
-        be evaulated separately in parallel.
-    n_replication : int
-        Number of replications. Only for comparison. The default is 1.
-    post_snapshot : int
-        Whether to plot the posterior in the sequential design. The default is
-        `True`.
-    step_snapshot : int
-        The number of steps to plot the posterior in the sequential design. The
-        default is 1.
-    max_a_post : list or array
-        Maximum a posteriori of the posterior distribution, if known. The
-        default is `[]`.
-    adapt_verbose : bool
-        Whether to plot the model response vs that of metamodel for the new
-        trining point in the sequential design.
-
-    Note
-    ----------
-    The following utiliy functions for the **exploitation** methods are
-    supported:
-
-    #### BayesOptDesign (when data is available)
-    - DKL (Kullback-Leibler Divergence)
-    - DPP (D-Posterior-percision)
-    - APP (A-Posterior-percision)
-
-    #### VarBasedOptDesign -> when data is not available
-    - Entropy (Entropy/MMSE/active learning)
-    - EIGF (Expected Improvement for Global fit)
-    - LOOCV (Leave-one-out Cross Validation)
-
-    #### alphabetic
-    - D-Opt (D-Optimality)
-    - A-Opt (A-Optimality)
-    - K-Opt (K-Optimality)
-    """
-
-    def __init__(self, Input, method='normal', meta_Model_type='pce',
-                 sampling_method='random', hdf5_file=None,
-                 n_new_samples=1, n_max_samples=None, mod_LOO_threshold=1e-16,
-                 tradeoff_scheme=None, n_canddidate=1, explore_method='random',
-                 exploit_method='Space-filling', util_func='Space-filling',
-                 n_cand_groups=4, n_replication=1, post_snapshot=False,
-                 step_snapshot=1, max_a_post=[], adapt_verbose=False):
-
-        self.InputObj = Input
-        self.method = method   # TODO: this still doing sth?
-        self.meta_Model_type = meta_Model_type
-        self.sampling_method = sampling_method
-        self.hdf5_file = hdf5_file
-        self.n_new_samples = n_new_samples
-        self.n_max_samples = n_max_samples
-        self.mod_LOO_threshold = mod_LOO_threshold
-        self.explore_method = explore_method
-        self.exploit_method = exploit_method
-        self.util_func = util_func
-        self.tradeoff_scheme = tradeoff_scheme
-        self.n_canddidate = n_canddidate
-        self.n_cand_groups = n_cand_groups
-        self.n_replication = n_replication
-        self.post_snapshot = post_snapshot
-        self.step_snapshot = step_snapshot
-        self.max_a_post = max_a_post
-        self.adapt_verbose = adapt_verbose
-        
-        # Other 
-        self.apce = None
-        self.ndim = None
-        
-        # Init 
-        self.check_valid_inputs()
-        
-        
-    def check_valid_inputs(self)-> None:
-        """
-        Check if the given InputObj is valid to use for further calculations:
-            Has some Marginals
-            Marginals have valid priors
-            All Marginals given as the same type (samples vs dist)
-
-        Returns
-        -------
-        None
-
-        """
-        Inputs = self.InputObj
-        self.ndim = len(Inputs.Marginals)
-        
-        # Check if PCE or aPCE metamodel is selected.
-        # TODO: test also for 'pce'??
-        if self.meta_Model_type.lower() == 'apce':
-            self.apce = True
-        else:
-            self.apce = False
-
-        # check if marginals given 
-        if not self.ndim >=1:
-            raise AssertionError('Cannot build distributions if no marginals are given')
-            
-        # check that each marginal is valid
-        for marginals in Inputs.Marginals:
-            if len(marginals.input_data) == 0:
-                if marginals.dist_type == None:
-                    raise AssertionError('Not all marginals were provided priors')
-                    break
-            if np.array(marginals.input_data).shape[0] and (marginals.dist_type != None):
-                raise AssertionError('Both samples and distribution type are given. Please choose only one.')
-                break
-                
-        # Check if input is given as dist or input_data.
-        self.input_data_given = -1
-        for marg in Inputs.Marginals:
-            #print(self.input_data_given)
-            size = np.array(marg.input_data).shape[0]
-            #print(f'Size: {size}')
-            if size and abs(self.input_data_given) !=1:
-                self.input_data_given = 2
-                break
-            if (not size) and self.input_data_given > 0:
-                self.input_data_given = 2
-                break
-            if not size:
-                self.input_data_given = 0
-            if size:
-                self.input_data_given = 1
-                
-        if self.input_data_given == 2:
-            raise AssertionError('Distributions cannot be built as the priors have different types')
-            
-    
-        # Get the bounds if input_data are directly defined by user:
-        if self.input_data_given:
-            for i in range(self.ndim):
-                low_bound = np.min(Inputs.Marginals[i].input_data)
-                up_bound = np.max(Inputs.Marginals[i].input_data)
-                Inputs.Marginals[i].parameters = [low_bound, up_bound]
-
-  
-
-    # -------------------------------------------------------------------------
-    def init_param_space(self, max_deg=None):
-        """
-        Initializes parameter space.
-
-        Parameters
-        ----------
-        max_deg : int, optional
-            Maximum degree. The default is `None`.
-
-        Returns
-        -------
-        raw_data : array of shape (n_params, n_samples)
-            Raw data.
-        bound_tuples : list of tuples
-            A list containing lower and upper bounds of parameters.
-
-        """
-        Inputs = self.InputObj
-        ndim = self.ndim
-        rosenblatt_flag = Inputs.Rosenblatt
-        mc_size = 50000
-
-        # Save parameter names
-        self.par_names = []
-        for parIdx in range(ndim):
-            self.par_names.append(Inputs.Marginals[parIdx].name)
-
-        # Create a multivariate probability distribution
-        # TODO: change this to make max_deg obligatory? at least in some specific cases?
-        if max_deg is not None:
-            JDist, poly_types = self.build_polytypes(rosenblatt=rosenblatt_flag)
-            self.JDist, self.poly_types = JDist, poly_types
-
-        if self.input_data_given:
-            self.MCSize = len(Inputs.Marginals[0].input_data)
-            self.raw_data = np.zeros((ndim, self.MCSize))
-
-            for parIdx in range(ndim):
-                # Save parameter names
-                try:
-                    self.raw_data[parIdx] = np.array(
-                        Inputs.Marginals[parIdx].input_data)
-                except:
-                    self.raw_data[parIdx] = self.JDist[parIdx].sample(mc_size)
-
-        else:
-            # Generate random samples based on parameter distributions
-            self.raw_data = chaospy.generate_samples(mc_size,
-                                                     domain=self.JDist)
-
-        # Extract moments
-        for parIdx in range(ndim):
-            mu = np.mean(self.raw_data[parIdx])
-            std = np.std(self.raw_data[parIdx])
-            self.InputObj.Marginals[parIdx].moments = [mu, std]
-
-        # Generate the bounds based on given inputs for marginals
-        bound_tuples = []
-        for i in range(ndim):
-            if Inputs.Marginals[i].dist_type == 'unif':
-                low_bound, up_bound = Inputs.Marginals[i].parameters
-            else:
-                low_bound = np.min(self.raw_data[i])
-                up_bound = np.max(self.raw_data[i])
-
-            bound_tuples.append((low_bound, up_bound))
-
-        self.bound_tuples = tuple(bound_tuples)
-
-    # -------------------------------------------------------------------------
-    def build_polytypes(self, rosenblatt):
-        """
-        Creates the polynomial types to be passed to univ_basis_vals method of
-        the MetaModel object.
-
-        Parameters
-        ----------
-        rosenblatt : bool
-            Rosenblatt transformation flag.
-
-        Returns
-        -------
-        orig_space_dist : object
-            A chaospy JDist object or a gaussian_kde object.
-        poly_types : list
-            List of polynomial types for the parameters.
-
-        """
-        Inputs = self.InputObj
-        
-        all_data = []
-        all_dist_types = []
-        orig_joints = []
-        poly_types = []
-        
-        for parIdx in range(self.ndim):
-
-            if Inputs.Marginals[parIdx].dist_type is None:
-                data = Inputs.Marginals[parIdx].input_data
-                all_data.append(data)
-                dist_type = None
-            else:
-                dist_type = Inputs.Marginals[parIdx].dist_type
-                params = Inputs.Marginals[parIdx].parameters
-
-            if rosenblatt:
-                polytype = 'hermite'
-                dist = chaospy.Normal()
-
-            elif dist_type is None:
-                polytype = 'arbitrary'
-                dist = None
-
-            elif 'unif' in dist_type.lower():
-                polytype = 'legendre'
-                if not np.array(params).shape[0]>=2:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                dist = chaospy.Uniform(lower=params[0], upper=params[1])
-
-            elif 'norm' in dist_type.lower() and \
-                 'log' not in dist_type.lower():
-                if not np.array(params).shape[0]>=2:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                polytype = 'hermite'
-                dist = chaospy.Normal(mu=params[0], sigma=params[1])
-
-            elif 'gamma' in dist_type.lower():
-                polytype = 'laguerre'
-                if not np.array(params).shape[0]>=3:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                dist = chaospy.Gamma(shape=params[0],
-                                     scale=params[1],
-                                     shift=params[2])
-
-            elif 'beta' in dist_type.lower():
-                if not np.array(params).shape[0]>=4:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                polytype = 'jacobi'
-                dist = chaospy.Beta(alpha=params[0], beta=params[1],
-                                    lower=params[2], upper=params[3])
-
-            elif 'lognorm' in dist_type.lower():
-                polytype = 'hermite'
-                if not np.array(params).shape[0]>=2:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                mu = np.log(params[0]**2/np.sqrt(params[0]**2 + params[1]**2))
-                sigma = np.sqrt(np.log(1 + params[1]**2 / params[0]**2))
-                dist = chaospy.LogNormal(mu, sigma)
-                # dist = chaospy.LogNormal(mu=params[0], sigma=params[1])
-
-            elif 'expon' in dist_type.lower():
-                polytype = 'exponential'
-                if not np.array(params).shape[0]>=2:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                dist = chaospy.Exponential(scale=params[0], shift=params[1])
-
-            elif 'weibull' in dist_type.lower():
-                polytype = 'weibull'
-                if not np.array(params).shape[0]>=3:
-                    raise AssertionError(f'Distribution has too few parameters!')
-                dist = chaospy.Weibull(shape=params[0], scale=params[1],
-                                       shift=params[2])
-
-            else:
-                message = (f"DistType {dist_type} for parameter"
-                           f"{parIdx+1} is not available.")
-                raise ValueError(message)
-
-            if self.input_data_given or self.apce:
-                polytype = 'arbitrary'
-
-            # Store dists and poly_types
-            orig_joints.append(dist)
-            poly_types.append(polytype)
-            all_dist_types.append(dist_type)
-
-        # Prepare final output to return
-        if None in all_dist_types:
-            # Naive approach: Fit a gaussian kernel to the provided data
-            Data = np.asarray(all_data)
-            try:
-                orig_space_dist = st.gaussian_kde(Data)
-            except:
-                raise ValueError('The samples provided to the Marginals should be 1D only')
-            self.prior_space = orig_space_dist
-        else:
-            orig_space_dist = chaospy.J(*orig_joints)
-            try:
-                self.prior_space = st.gaussian_kde(orig_space_dist.sample(10000))
-            except:
-                raise ValueError('Parameter values are not valid, please set differently')
-
-        return orig_space_dist, poly_types
-
-    # -------------------------------------------------------------------------
-    def transform(self, X, params=None, method=None):
-        """
-        Transform the samples via either a Rosenblatt or an isoprobabilistic
-        transformation.
-
-        Parameters
-        ----------
-        X : array of shape (n_samples,n_params)
-            Samples to be transformed.
-        method : string
-            If transformation method is 'user' transform X, else just pass X.
-
-        Returns
-        -------
-        tr_X: array of shape (n_samples,n_params)
-            Transformed samples.
-
-        """
-        # Check for built JDist
-        if not hasattr(self, 'JDist'):
-            raise AttributeError('Call function init_param_space first to create JDist')
-            
-        # Check if X is 2d
-        if X.ndim != 2:
-            raise AttributeError('X should have two dimensions')
-            
-        # Check if size of X matches Marginals
-        if X.shape[1]!= self.ndim:
-            raise AttributeError('The second dimension of X should be the same size as the number of marginals in the InputObj')
-        
-        if self.InputObj.Rosenblatt:
-            self.origJDist, _ = self.build_polytypes(False)
-            if method == 'user':
-                tr_X = self.JDist.inv(self.origJDist.fwd(X.T)).T
-            else:
-                # Inverse to original spcace -- generate sample ED
-                tr_X = self.origJDist.inv(self.JDist.fwd(X.T)).T
-        else:
-            # Transform samples via an isoprobabilistic transformation
-            n_samples, n_params = X.shape
-            Inputs = self.InputObj
-            origJDist = self.JDist
-            poly_types = self.poly_types
-
-            disttypes = []
-            for par_i in range(n_params):
-                disttypes.append(Inputs.Marginals[par_i].dist_type)
-
-            # Pass non-transformed X, if arbitrary PCE is selected.
-            if None in disttypes or self.input_data_given or self.apce:
-                return X
-
-            cdfx = np.zeros((X.shape))
-            tr_X = np.zeros((X.shape))
-
-            for par_i in range(n_params):
-
-                # Extract the parameters of the original space
-                disttype = disttypes[par_i]
-                if disttype is not None:
-                    dist = origJDist[par_i]
-                else:
-                    dist = None
-                polytype = poly_types[par_i]
-                cdf = np.vectorize(lambda x: dist.cdf(x))
-
-                # Extract the parameters of the transformation space based on
-                # polyType
-                if polytype == 'legendre' or disttype == 'uniform':
-                    # Generate Y_Dists based
-                    params_Y = [-1, 1]
-                    dist_Y = st.uniform(loc=params_Y[0],
-                                        scale=params_Y[1]-params_Y[0])
-                    inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
-
-                elif polytype == 'hermite' or disttype == 'norm':
-                    params_Y = [0, 1]
-                    dist_Y = st.norm(loc=params_Y[0], scale=params_Y[1])
-                    inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
-
-                elif polytype == 'laguerre' or disttype == 'gamma':
-                    params_Y = [1, params[1]]
-                    dist_Y = st.gamma(loc=params_Y[0], scale=params_Y[1])
-                    inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
-
-                # Compute CDF_x(X)
-                cdfx[:, par_i] = cdf(X[:, par_i])
-
-                # Compute invCDF_y(cdfx)
-                tr_X[:, par_i] = inv_cdf(cdfx[:, par_i])
-
-        return tr_X
-
-
diff --git a/src/bayesvalidrox/surrogate_models/input_space.py b/src/bayesvalidrox/surrogate_models/input_space.py
index e0a94a98a..c46e925bc 100644
--- a/src/bayesvalidrox/surrogate_models/input_space.py
+++ b/src/bayesvalidrox/surrogate_models/input_space.py
@@ -15,8 +15,8 @@ from .apoly_construction import apoly_construction
 
 class InputSpace:
     """
-    This class generates the input space for the metamodel polynomials
-    using the marginals defined in the `Input` object.
+    This class generates samples from the prescribed marginals for the model
+    parameters using the `Input` object.
 
     Attributes
     ----------
@@ -214,6 +214,7 @@ class InputSpace:
                 up_bound = np.max(Inputs.Marginals[i].input_data)
                 Inputs.Marginals[i].parameters = [low_bound, up_bound]
 
+
 
     # -------------------------------------------------------------------------
     def init_param_space(self, max_deg=None):
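For orientation, InputSpace is built and used much like the ExpDesigns_ class it replaces. A sketch, with the marginal setup following the package's example scripts (add_marginals and the attribute names are taken from those examples, not from this patch):

    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.input_space import InputSpace

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].name = 'x'
    inp.Marginals[0].dist_type = 'unif'
    inp.Marginals[0].parameters = [-5, 5]

    space = InputSpace(inp)
    space.init_param_space(max_deg=5)         # builds JDist and poly_types
    tr_X = space.transform(X, method='user')  # X: array (n_samples, n_params)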
diff --git a/src/bayesvalidrox/surrogate_models/surrogate_models.py b/src/bayesvalidrox/surrogate_models/surrogate_models.py
index b88f128b2..326539f9a 100644
--- a/src/bayesvalidrox/surrogate_models/surrogate_models.py
+++ b/src/bayesvalidrox/surrogate_models/surrogate_models.py
@@ -17,7 +17,7 @@ import os
 from joblib import Parallel, delayed
 import copy
 
-from .exp_designs_ import ExpDesigns_
+from .input_space import InputSpace
 from .glexindex import glexindex
 from .eval_rec_rule import eval_univ_basis
 from .reg_fast_ard import RegressionFastARD
@@ -91,11 +91,11 @@ class MetaModel():
     To define the sampling methods and the training set, an experimental design
     instance shall be defined. This can be done by:
 
-    >>> MetaModelOpts.add_ExpDesign()
+    >>> MetaModelOpts.add_InputSpace()
 
     Two experimental design schemes are supported: one-shot (`normal`) and
     adaptive sequential (`sequential`) designs.
-    For experimental design refer to `ExpDesigns`.
+    For experimental design refer to `InputSpace`.
 
     """
 
@@ -115,7 +115,7 @@ class MetaModel():
         self.dim_red_method = dim_red_method
         self.verbose = verbose
  
-    def build_metamodel(self, n_init_samples) -> None:
+    def build_metamodel(self, n_init_samples=None) -> None:
         """
         Builds the parts of the metamodel (polynomials, ...) that are needed before fitting.
 
@@ -126,13 +126,13 @@ class MetaModel():
 
         """
         
-        # Add expDesign to MetaModel if it does not have any
-        if not hasattr(self, 'ExpDesign'):
-            self.ExpDesign = ExpDesigns_(self.input_obj)
-            self.ExpDesign.n_init_samples = n_init_samples
-            self.ExpDesign.init_param_space(np.max(self.pce_deg))
+        # Add InputSpace to MetaModel if it does not have any
+        if not hasattr(self, 'InputSpace'):
+            self.InputSpace = InputSpace(self.input_obj)
+            self.InputSpace.n_init_samples = n_init_samples
+            self.InputSpace.init_param_space(np.max(self.pce_deg))
             
-        self.ndim = self.ExpDesign.ndim
+        self.ndim = self.InputSpace.ndim
         
         if not hasattr(self, 'CollocationPoints'):
             raise AttributeError('Please provide samples to the metamodel before building it.')
@@ -175,7 +175,7 @@ class MetaModel():
                                               reverse=False, graded=True)
                     self.allBasisIndices[str(deg)][str(q)] = basis_indices
 
-        # Evaluate the univariate polynomials on ExpDesign
+        # Evaluate the univariate polynomials on InputSpace
         if self.meta_model_type.lower() != 'gpe':
            self.univ_p_val = self.univ_basis_vals(self.CollocationPoints)
         
@@ -193,18 +193,18 @@ class MetaModel():
 
         """
         Model = self.ModelObj
-        self.ExpDesignFlag = 'normal'
+        self.InputSpaceFlag = 'normal'
         # --- Prepare pce degree ---
         if self.meta_model_type.lower() == 'pce':
             if type(self.pce_deg) is not np.ndarray:
                 self.pce_deg = np.array(self.pce_deg)
 
-        #if self.ExpDesign.method == 'sequential':
+        #if self.InputSpace.method == 'sequential':
         #    raise Exception(
         #        "Please use MetaModelEngine class for the sequential design!"
         #        )
 
-        #elif self.ExpDesign.method == 'normal':
+        #elif self.InputSpace.method == 'normal':
         self.train_norm_design(verbose=True)
 
         #else:
@@ -214,7 +214,7 @@ class MetaModel():
         # Zip the model run directories
         # TODO: can move this elsewhere? Is this important?
         if self.ModelObj.link_type.lower() == 'pylink' and\
-           self.ExpDesign.sampling_method.lower() != 'user':
+           self.InputSpace.sampling_method.lower() != 'user':
             Model.zip_subdirs(Model.name, f'{Model.name}_')
 
         return self
@@ -239,18 +239,18 @@ class MetaModel():
 
         """
         # Get the collocation points to run the forward model
-        if not hasattr(self.ExpDesign, 'n_init_samples'):
+        if not hasattr(self.InputSpace, 'n_init_samples'):
             if n_samples == None or n_samples<=0:
                 raise ValueError('The number of samples provided for static training is not valid, please provide an int>0')
-            self.CollocationPoints, self.OutputDict = self.generate_ExpDesign(self.ModelObj, n_samples)
+            self.CollocationPoints, self.OutputDict = self.generate_InputSpace(self.ModelObj, n_samples)
         else:
-            self.CollocationPoints, self.OutputDict = self.generate_ExpDesign(self.ModelObj, self.ExpDesign.n_init_samples)
+            self.CollocationPoints, self.OutputDict = self.generate_InputSpace(self.ModelObj, self.InputSpace.n_init_samples)
         
         if 'x_values' in self.OutputDict:
-            self.ExpDesign.x_values = self.OutputDict['x_values']
+            self.InputSpace.x_values = self.OutputDict['x_values']
             del self.OutputDict['x_values']
         
-        self.ndim = self.ExpDesign.ndim
+        self.ndim = self.InputSpace.ndim
     
         # Build the metamodel if not done already
         self.build_metamodel()
@@ -451,7 +451,7 @@ class MetaModel():
         return final_out_dict
 
     # -------------------------------------------------------------------------
-    def add_ExpDesign(self):
+    def add_InputSpace(self):
         """
         Instantiates the experimental design object.
 
@@ -460,11 +460,11 @@ class MetaModel():
         None.
 
         """
-        self.ExpDesign = ExpDesigns_(self.input_obj,
+        self.InputSpace = InputSpace(self.input_obj,
                                     meta_Model_type=self.meta_model_type)
 
     # -------------------------------------------------------------------------
-    def generate_ExpDesign(self, Model, n_samples):
+    def generate_InputSpace(self, Model, n_samples):
         """
         Prepares the experimental design either by reading from the prescribed
         data or running simulations.
@@ -486,43 +486,43 @@ class MetaModel():
         ED_Y: dict
             Model simulations (target) for all outputs.
         """
-        # Create ExpDesign if not already there
-        if not hasattr(self, 'ExpDesign'):
-            self.add_ExpDesign()
+        # Create InputSpace if not already there
+        if not hasattr(self, 'InputSpace'):
+            self.add_InputSpace()
             
-        ExpDesign = self.ExpDesign
-        # Read ExpDesign (training and targets) from the provided hdf5
-        if ExpDesign.hdf5_file is not None:
+        InputSpace = self.InputSpace
+        # Read InputSpace (training and targets) from the provided hdf5
+        if InputSpace.hdf5_file is not None:
             # TODO: need to run 'generate_ED' as well after this or not?
-            ExpDesign.read_from_file(self.out_names)
+            InputSpace.read_from_file(self.out_names)
         else:
             # Check if an old hdf5 file exists: if yes, rename it
             if hasattr(self.ModelObj, 'name'):
                self.name = self.ModelObj.name 
             else:
                 self.name = 'MetaModel'
-            hdf5file = f'ExpDesign_{self.name}.hdf5'
+            hdf5file = f'InputSpace_{self.name}.hdf5'
             if os.path.exists(hdf5file):
                 os.rename(hdf5file, 'old_'+hdf5file)
 
         # ---- Prepare X samples ----
-        ExpDesign.generate_ED(n_samples, transform=True, max_pce_deg=np.max(self.pce_deg))
+        InputSpace.generate_ED(n_samples, transform=True, max_pce_deg=np.max(self.pce_deg))
         
         # ---- Run simulations at X ----
-        if not hasattr(ExpDesign, 'Y') or ExpDesign.Y is None:
+        if not hasattr(InputSpace, 'Y') or InputSpace.Y is None:
             print('\n Now the forward model needs to be run!\n')
-            ED_Y, up_ED_X = Model.run_model_parallel(ExpDesign.X)
+            ED_Y, up_ED_X = Model.run_model_parallel(InputSpace.X)
             self.ModelOutputDict = ED_Y
-            ExpDesign.Y = ED_Y
+            InputSpace.Y = ED_Y
         else:
             # Check if a dict has been passed.
-            if type(ExpDesign.Y) is dict:
-                self.ModelOutputDict = ExpDesign.Y
+            if type(InputSpace.Y) is dict:
+                self.ModelOutputDict = InputSpace.Y
             else:
                 raise Exception('Please provide either a dictionary or a hdf5'
-                                'file to ExpDesign.hdf5_file argument.')
+                                ' file to InputSpace.hdf5_file argument.')
 
-        return ExpDesign.X_tr, self.ModelOutputDict
+        return InputSpace.X_tr, self.ModelOutputDict
 
     # -------------------------------------------------------------------------
     def univ_basis_vals(self, samples, n_max=None):
@@ -542,13 +542,13 @@ class MetaModel():
             All univariate regressors up to n_max.
         """
         # Extract information
-        poly_types = self.ExpDesign.poly_types
+        poly_types = self.InputSpace.poly_types
         if samples.ndim != 2:
             samples = samples.reshape(1, len(samples))
         n_max = np.max(self.pce_deg) if n_max is None else n_max
 
         # Extract poly coeffs
-        if self.ExpDesign.input_data_given or self.ExpDesign.apce:
+        if self.InputSpace.input_data_given or self.InputSpace.apce:
             apolycoeffs = self.polycoeffs
         else:
             apolycoeffs = None
@@ -1162,8 +1162,7 @@ class MetaModel():
         return gp
 
     # -------------------------------------------------------------------------
-    def eval_metamodel(self, samples=None, nsamples=None,
-                       sampling_method='random', return_samples=False):
+    def eval_metamodel(self, samples):
         """
-        Evaluates meta-model at the requested samples. One can also generate
-        nsamples.
+        Evaluates the meta-model at the requested samples.
@@ -1188,21 +1187,11 @@ class MetaModel():
         std_pred : dict
             Standard deviation of the predictions.
         """
-        # Generate or transform (if need be) samples
-        if samples is None:
-            # Generate
-            samples = self.ExpDesign.generate_samples(
-                nsamples,
-                sampling_method
-                )
-
         # Transform samples to the independent space
-        samples = self.ExpDesign.transform(
+        samples = self.InputSpace.transform(
             samples,
             method='user'
             )
-        # print(samples)
-
         # Compute univariate bases for the given samples
         if self.meta_model_type.lower() != 'gpe':
             univ_p_val = self.univ_basis_vals(
@@ -1299,10 +1288,7 @@ class MetaModel():
             else:
                 std_pred[output] = std_pred_b[b_i][output]
 
-        if return_samples:
-            return mean_pred, std_pred, samples
-        else:
-            return mean_pred, std_pred
+        return mean_pred, std_pred
 
     # -------------------------------------------------------------------------
     def create_model_error(self, X, y, Model, name='Calib'):
@@ -1437,11 +1423,11 @@ class MetaModel():
         new_MetaModelOpts = copy.deepcopy(self)
         new_MetaModelOpts.ModelObj = ModelObj
         new_MetaModelOpts.input_obj = InputObj
-        new_MetaModelOpts.ExpDesign.meta_Model = 'aPCE'
-        new_MetaModelOpts.ExpDesign.InputObj = InputObj
-        new_MetaModelOpts.ExpDesign.ndim = len(InputObj.Marginals)
+        new_MetaModelOpts.InputSpace.meta_Model = 'aPCE'
+        new_MetaModelOpts.InputSpace.InputObj = InputObj
+        new_MetaModelOpts.InputSpace.ndim = len(InputObj.Marginals)
         new_MetaModelOpts.n_params = len(InputObj.Marginals)
-        new_MetaModelOpts.ExpDesign.hdf5_file = None
+        new_MetaModelOpts.InputSpace.hdf5_file = None
 
         return new_MetaModelOpts
 
@@ -1468,7 +1454,7 @@ class MetaModel():
         max_deg = np.max(self.pce_deg)
         min_Deg = np.min(self.pce_deg)
         # TODO: remove the options for sequential?
-        #nitr = n_samples - self.ExpDesign.n_init_samples
+        #nitr = n_samples - self.InputSpace.n_init_samples
 
         # Check q-norm
         if not np.isscalar(self.pce_q_norm):
@@ -1485,8 +1471,8 @@ class MetaModel():
 
         deg_new = max_deg
         #d = nitr if nitr != 0 and self.n_params > 5 else 1
-        d = 1
-        min_index = np.argmin(abs(M_uptoMax(max_deg)-ndim*n_samples*d))
+        # d = 1
+        # min_index = np.argmin(abs(M_uptoMax(max_deg)-ndim*n_samples*d))
         # deg_new = range(1, max_deg+1)[min_index]
 
         if deg_new > min_Deg and self.pce_reg_method.lower() != 'fastard':
@@ -1497,18 +1483,18 @@ class MetaModel():
         return deg_array
 
     def generate_polynomials(self, max_deg=None):
-        # Check for ExpDesign
-        if not hasattr(self, 'ExpDesign'):
-            raise AttributeError('Generate or add ExpDesign before generating polynomials')
+        # Check for InputSpace
+        if not hasattr(self, 'InputSpace'):
+            raise AttributeError('Generate or add InputSpace before generating polynomials')
             
-        ndim = self.ExpDesign.ndim
+        ndim = self.InputSpace.ndim
         # Create orthogonal polynomial coefficients if necessary
         if (self.meta_model_type.lower()!='gpe') and max_deg is not None:# and Inputs.poly_coeffs_flag:
             self.polycoeffs = {}
             for parIdx in tqdm(range(ndim), ascii=True,
                                desc="Computing orth. polynomial coeffs"):
                 poly_coeffs = apoly_construction(
-                    self.ExpDesign.raw_data[parIdx],
+                    self.InputSpace.raw_data[parIdx],
                     max_deg
                     )
                 self.polycoeffs[f'p_{parIdx+1}'] = poly_coeffs
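Taken together, these MetaModel changes mean that callers must supply evaluation samples explicitly, or go through the Engine wrapper, which still generates them. A caller-side sketch, assuming a trained engine:

    # Generate evaluation points from the experimental design, then predict
    samples = engine.ExpDesign.generate_samples(1000, 'random')
    mean_pred, std_pred = engine.MetaModel.eval_metamodel(samples)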
-- 
GitLab