From 5fdb334694cccc706c7e4c0cbce367bcacb5ba23 Mon Sep 17 00:00:00 2001
From: kohlhaasrebecca <rebecca.kohlhaas@outlook.com>
Date: Fri, 15 Dec 2023 22:19:01 +0100
Subject: [PATCH] Start testing MetaModel

Started testing MetaModel while also refactoring a bit more by moving code between ExpDesign and MetaModel
---
 .coverage                                     | Bin 53248 -> 53248 bytes
 .../test_analytical_function.py               | 256 ++++++++++++++++++
 src/bayesvalidrox/pylink/pylink.py            |  30 +-
 .../surrogate_models/apoly_construction.py    |   2 +
 .../surrogate_models/exp_designs.py           | 177 +++++++++---
 .../surrogate_models/glexindex.py             |  83 ++----
 .../surrogate_models/surrogate_models.py      |  83 +++---
 tests/test_ExpDesign.py                       | 256 ++++++++++++++++--
 tests/test_Input.py                           |   3 -
 tests/test_MetaModel.py                       |  97 ++++++-
 tests/test_polyconst.py                       |  69 +++++
 tests/test_pylink.py                          |  80 ++++++
 12 files changed, 972 insertions(+), 164 deletions(-)
 create mode 100644 examples/analytical-function/test_analytical_function.py
 create mode 100644 tests/test_polyconst.py
 create mode 100644 tests/test_pylink.py

diff --git a/.coverage b/.coverage
index 6ca70d8588916209ef155569ae86387872500843..5a50f3b44fe726ce44b701f18e81b5a7962fe24f 100644
GIT binary patch
delta 1177
zcmbVLZD?Cn7=G`)=YE{rkJGK~vNpMESlJNPHMLWxOq=;pX}4+pwaGRNF?MTRc9WVE
z5*fLvow6=cS|e^1rNuvjqhbt7p~VKQKZLR2k1;oumH2^!K@-?$+)CoPv9a;5hs$}-
z`@GM4&ig*++{6r*nBl%a`@3{B3uJ=q({5>RYb|P8?NNSIh7>{mNN%@%Wjly}#$imP
z&!jH#hIm?3gel=+KF43+d(dBKHa&R?2?ulpBTcCvdOB12ZkdE)fkER`dnopTlj1E^
zd~+xmi1~v?IAVnyHKDD1Gxh9MqSkAKhAX6|8j_5=qoL>e!-FS{OyBdbV!>c&)Mq7`
z{ZZfPP@q42+-LX#vB;p`7ubDb*ziXNgU0ble>mdZ2zaXjT7?Y&bo4opN%Ew4Q#6DR
zg-%hGE=eA3S$juwDK~6~6jANP5#<eBhkvnsEq^S#)i3#9`HOrnx=&NBk^bN$LI)9*
zVCd)`O9Jg*3G^2cRbU<7RI7lqCSvCu2M`rtou^g>Xs+S>r~ku#%o}LLhpqM>`9JMv
z*1VFdOP5xF+#`3$Z{%HaR?BL=TBoLKjQXv5UOlQdD588#UXmB&Mp>~P!OM6GkKqoy
zgNgytN@i2ttYi`$G1x_)!1DHI7NY5BT60ohn-fJ_UuoOYy1k*^LH&p5g~Rf;!n14k
z@|b678aGq0p&8Ac<&HG*3?6Bxmz^J<dpwt8q0aqH(YrQU6d~t1XBLu`&t3YiZ!I}p
zw0jEfc+S3995a{P@q#1Sl3OggOZIY-`lTExw8qa-R=K{+W@2OSBeNb`=VR%FbywL7
zUVu?%rct6`o76+0i&J+NzE|cO^OxG~!=hEo>tf!`z|B@jtbB5Nl(F*GM}D<z?qoXN
zi}`;Ko>3riOn}6vrPlx?7hB~3Ac&67Ykxeki-EguJ~jU!ZZ3MRlgqbPcK|%L3t$4^
z<<hk)XZF!N$X;d__KZ9&(1S-=Azxm-rL6Ciec(-`hLR1FFX7bUd4Xkirml}C%t~}X
z;AsOIKX4dX6}O>3O<|*eVySGJEOh)-{}0AMf-bBIB@5VZO_i~=x~N#!h9?P}wLdo?
zveeeGciosl7;lll7QNqB`EkhJlsNIIK)Oj6*-PKTJ*`ooTt?R`#_6mWha+=ndd|h^
fB$yJjlpZ4y5~TFqgm5|sD(AE&2c1&!t8e`cV}@If

delta 753
zcmYjPO-vI(6rOEoW@q+qiqTXyMtd+34pPx*h%ph2AXM-N6S<%z3JONC1Wk&8b|GF+
zgW_W1!30iv@gS~;0s%ds2Q|THqBkL&JZYP#R4m}zg%UTj-@Ny|@6CKKyVi)`8u35F
z<z*&GAu%c%1u`BR<@!gxP5YzW*LJ9r>T%_>a+xpjA-;>fV^Mig9*}p^aavDhk{TWC
zCbZ7PsCS!vGP2vYDmRlt$bxH`N^06;*RMGxRp&UZlm64G)s^ws;g-&}i|r+`E(IZE
zk})LC$baR#bb_9eOW1qXBqZaBQK2m=SF|Gi5`UoCyp%61U(_k}g#Jl9r;Ln^4)zdy
z41<G$hCSdQNX!TZ2dmk-gVX0ZsRI?yTVr_xPA<wqWhQS%Vp>cHM?^(~@z5wWiVa=Q
z=#G9vkLXxkP+zMr)Tb&@a>^0@f%o$+-p-qyf<O`9#7r#l4JM8eUI@{~LY%s(I{p1a
zIB9Yq_HlLrW`u{ifL%bApbTmTt8S5r#Cs!h`Y5t~Gc-5Ry4d7N)*!dEGF*f1G;e+L
zc2)z9t``|fKWgi7({h9+zkZ*Zo&7QM%ilR3mOKqE_BMbJvZUSv6fWwE)AY*1)%+oI
z60}19U`5cLxdj67XW7dGR+19e9%-$|uD$K}TQc7R`s^}*Z}padH&+M<3*P*E5Ll~q
zj|A}L17uOZd$zw-_Q4+T{S5DyFz0cM(p8OHIyxj&bhflR^D--E1e#6;R_24=iso()
xXM+bb;lzA2HyuvEVki^#ejrh6%lG!e2MGwLb~qLCn<8P$uUzJkuoE+V{{Y-oytDuS

diff --git a/examples/analytical-function/test_analytical_function.py b/examples/analytical-function/test_analytical_function.py
new file mode 100644
index 000000000..edb0cee0c
--- /dev/null
+++ b/examples/analytical-function/test_analytical_function.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+This test shows a surrogate-assisted Bayesian calibration of a time dependent
+    analytical function.
+
+Author: Farid Mohammadi, M.Sc.
+E-Mail: farid.mohammadi@iws.uni-stuttgart.de
+Department of Hydromechanics and Modelling of Hydrosystems (LH2)
+Institute for Modelling Hydraulic and Environmental Systems (IWS), University
+of Stuttgart, www.iws.uni-stuttgart.de/lh2/
+Pfaffenwaldring 61
+70569 Stuttgart
+
+Created on Fri Aug 9 2019
+
+"""
+
+import numpy as np
+import pandas as pd
+import sys
+import joblib
+# import bayesvalidrox
+# Add BayesValidRox path
+sys.path.append("src/")
+
+from bayesvalidrox.pylink.pylink import PyLinkForwardModel
+from bayesvalidrox.surrogate_models.inputs import Input
+from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
+from bayesvalidrox.surrogate_models.meta_model_engine import MetaModelEngine
+from bayesvalidrox.post_processing.post_processing import PostProcessing
+from bayesvalidrox.bayes_inference.bayes_inference import BayesInference
+from bayesvalidrox.bayes_inference.discrepancy import Discrepancy
+import matplotlib
+matplotlib.use('agg')
+
+
+if __name__ == "__main__":
+
+    # Number of parameters
+    ndim = 10  # 2, 10
+
+    # =====================================================
+    # =============   COMPUTATIONAL MODEL  ================
+    # =====================================================
+    Model = PyLinkForwardModel()
+
+    Model.link_type = 'Function'
+    Model.py_file = 'analytical_function'
+    Model.name = 'AnalyticFunc'
+
+    Model.Output.names = ['Z']
+
+    # For Bayesian inversion synthetic data with X=[0,0]
+    Model.observations = {}
+    Model.observations['Time [s]'] = np.arange(0, 10, 1.) / 9
+    Model.observations['Z'] = np.repeat([2.], 10)
+
+    # For checking with the MonteCarlo reference
+    Model.mc_reference = {}
+    Model.mc_reference['Time [s]'] = np.arange(0, 10, 1.) / 9
+    Model.mc_reference['mean'] = np.load(f"data/mean_{ndim}.npy")
+    Model.mc_reference['std'] = np.load(f"data/std_{ndim}.npy")
+
+    # =====================================================
+    # =========   PROBABILISTIC INPUT MODEL  ==============
+    # =====================================================
+    # Define the uncertain parameters with their mean and
+    # standard deviation
+    Inputs = Input()
+
+    # Assuming dependent input variables
+    # Inputs.Rosenblatt = True
+
+    for i in range(ndim):
+        Inputs.add_marginals()
+        Inputs.Marginals[i].name = "$\\theta_{"+str(i+1)+"}$"
+        Inputs.Marginals[i].dist_type = 'uniform'
+        Inputs.Marginals[i].parameters = [-5, 5]
+
+    # arbitrary polynomial chaos
+    # inputParams = np.load('data/InputParameters_{}.npy'.format(ndim))
+    # for i in range(ndim):
+    #     Inputs.add_marginals()
+    #     Inputs.Marginals[i].name = f'$X_{i+1}$'
+    #     Inputs.Marginals[i].input_data = inputParams[:, i]
+
+    # =====================================================
+    # ==========  DEFINITION OF THE METAMODEL  ============
+    # =====================================================
+    MetaModelOpts = MetaModel(Inputs, Model)
+
+    # Select if you want to preserve the spatial/temporal dependencies
+    # MetaModelOpts.dim_red_method = 'PCA'
+    # MetaModelOpts.var_pca_threshold = 99.999
+    # MetaModelOpts.n_pca_components = 10
+
+    # Select your metamodel method
+    # 1) PCE (Polynomial Chaos Expansion) 2) aPCE (arbitrary PCE)
+    # 3) GPE (Gaussian Process Emulator)
+    MetaModelOpts.meta_model_type = 'aPCE'
+
+    # ------------------------------------------------
+    # ------------- PCE Specification ----------------
+    # ------------------------------------------------
+    # Select the sparse least-square minimization method for
+    # the PCE coefficients calculation:
+    # 1)OLS: Ordinary Least Square  2)BRR: Bayesian Ridge Regression
+    # 3)LARS: Least angle regression  4)ARD: Bayesian ARD Regression
+    # 5)FastARD: Fast Bayesian ARD Regression
+    # 6)BCS: Bayesian Compressive Sensing
+    # 7)OMP: Orthogonal Matching Pursuit
+    # 8)VBL: Variational Bayesian Learning
+    # 9)EBL: Empirical Bayesian Learning
+    MetaModelOpts.pce_reg_method = 'FastARD'
+
+    # Bootstraping
+    # 1) normal 2) fast
+    MetaModelOpts.bootstrap_method = 'fast'
+    MetaModelOpts.n_bootstrap_itrs = 1
+
+    # Specify the max degree to be compared by the adaptive algorithm:
+    # The degree with the lowest Leave-One-Out cross-validation (LOO)
+    # error (or the highest score=1-LOO)estimator is chosen as the final
+    # metamodel. pce_deg accepts degree as a scalar or a range.
+    MetaModelOpts.pce_deg = 12
+
+    # q-quasi-norm 0<q<1 (default=1)
+    MetaModelOpts.pce_q_norm = 0.85 if ndim < 5 else 0.5
+
+    # Print summary of the regression results
+    # MetaModelOpts.verbose = True
+
+    # ------------------------------------------------
+    # ------ Experimental Design Configuration -------
+    # ------------------------------------------------
+    MetaModelOpts.add_ExpDesign()
+
+    # One-shot (normal) or Sequential Adaptive (sequential) Design
+    MetaModelOpts.ExpDesign.method = 'normal'
+    MetaModelOpts.ExpDesign.n_init_samples = 3*ndim
+
+    # Sampling methods
+    # 1) random 2) latin_hypercube 3) sobol 4) halton 5) hammersley
+    # 6) chebyshev(FT) 7) grid(FT) 8)user
+    MetaModelOpts.ExpDesign.sampling_method = 'latin_hypercube'
+
+    # Provide the experimental design object with a hdf5 file
+    # MetaModelOpts.ExpDesign.hdf5_file = 'ExpDesign_AnalyticFunc.hdf5'
+
+    # Train the meta model
+    meta_model_engine = MetaModelEngine(MetaModelOpts)
+    meta_model_engine.run()
+    PCEModel = meta_model_engine.MetaModel
+
+    # Save PCE models
+    with open(f'PCEModel_{Model.name}.pkl', 'wb') as output:
+        joblib.dump(PCEModel, output, 2)
+
+    # Load the objects
+    # with open(f"PCEModel_{Model.name}.pkl", "rb") as input:
+    #     PCEModel = joblib.load(input)
+
+    # =====================================================
+    # =========  POST PROCESSING OF METAMODELS  ===========
+    # =====================================================
+    PostPCE = PostProcessing(PCEModel)
+
+    # Plot to check validation visually.
+    PostPCE.valid_metamodel(n_samples=1)
+
+    # Compute and print RMSE error
+    PostPCE.check_accuracy(n_samples=300)
+
+    # Compute the moments and compare with the Monte-Carlo reference
+    if MetaModelOpts.meta_model_type != 'GPE':
+        PostPCE.plot_moments()
+
+    # Plot the evolution of the KLD,BME, and Modified LOOCV error
+    if MetaModelOpts.ExpDesign.method == 'sequential':
+        refBME_KLD = np.load("data/refBME_KLD_"+str(ndim)+".npy")
+        PostPCE.plot_seq_design_diagnostics(refBME_KLD)
+
+    # Plot the sobol indices
+    if MetaModelOpts.meta_model_type != 'GPE':
+        total_sobol = PostPCE.sobol_indices()
+
+    # =====================================================
+    # ========  Bayesian inference with Emulator ==========
+    # =====================================================
+    BayesOpts = BayesInference(PCEModel)
+    BayesOpts.emulator = True
+    BayesOpts.plot_post_pred = True
+
+    # BayesOpts.selected_indices = [0, 3, 5,  7, 9]
+    # BME Bootstrap
+    # BayesOpts.bootstrap = True
+    # BayesOpts.n_bootstrap_itrs = 500
+    # BayesOpts.bootstrap_noise = 100
+
+    # Bayesian cross validation
+    # BayesOpts.bayes_loocv = True
+
+    # Select the inference method
+    import emcee
+    BayesOpts.inference_method = "MCMC"
+    # Set the MCMC parameters passed to self.mcmc_params
+    BayesOpts.mcmc_params = {
+        'n_steps': 1e5,
+        'n_walkers': 30,
+        'moves': emcee.moves.KDEMove(),
+        'multiprocessing': False,
+        'verbose': False
+        }
+
+    # ----- Define the discrepancy model -------
+    BayesOpts.measurement_error = obsData
+
+    # # -- (Option B) --
+    DiscrepancyOpts = Discrepancy('')
+    DiscrepancyOpts.type = 'Gaussian'
+    DiscrepancyOpts.parameters = obsData**2
+    BayesOpts.Discrepancy = DiscrepancyOpts
+
+    # -- (Option C) --
+    # DiscOutputOpts = Input()
+    # # # OutputName = 'Z'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[0].Nnme = '$\sigma^2_{\epsilon}$'
+    # DiscOutputOpts.Marginals[0].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[0].parameters =  [0, 10]
+    # BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
+    #                           'infer': Discrepancy(DiscOutputOpts)}
+
+    # BayesOpts.bias_inputs = {'Z':np.arange(0, 10, 1.).reshape(-1,1) / 9}
+    # DiscOutputOpts = Input()
+    # # OutputName = 'lambda'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[0].name = '$\lambda$'
+    # DiscOutputOpts.Marginals[0].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[0].parameters = [0, 1]
+
+    # # OutputName = 'sigma_f'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[1].Name = '$\sigma_f$'
+    # DiscOutputOpts.Marginals[1].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[1].parameters = [0, 1e-4]
+    # BayesOpts.Discrepancy = Discrepancy(DiscOutputOpts)
+    # BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
+    #                           'infer': Discrepancy(DiscOutputOpts)}
+    # Start the calibration/inference
+    Bayes_PCE = BayesOpts.create_inference()
+
+    # Save class objects
+    with open(f'Bayes_{Model.name}.pkl', 'wb') as output:
+        joblib.dump(Bayes_PCE, output, 2)
diff --git a/src/bayesvalidrox/pylink/pylink.py b/src/bayesvalidrox/pylink/pylink.py
index 7af4a14b8..30d2fbb3d 100644
--- a/src/bayesvalidrox/pylink/pylink.py
+++ b/src/bayesvalidrox/pylink/pylink.py
@@ -25,7 +25,7 @@ class PyLinkForwardModel(object):
     ----------
     link_type : str
         The type of the wrapper. The default is `'pylink'`. This runs the
-        third-party software or an executable using a sell command with given
+        third-party software or an executable using a shell command with given
         input files.
         Second option is `'function'` which assumed that model can be run using
         a function written separately in a Python script.
@@ -143,6 +143,34 @@ class PyLinkForwardModel(object):
 
     # -------------------------------------------------------------------------
     def within_range(self, out, minout, maxout):
+        """
+        Checks if all the values in out lie between minout and maxout
+
+        Parameters
+        ----------
+        out : array or list
+            Data to check against range
+        minout : int
+            Lower bound of the range
+        maxout : int
+            Upper bound of the range
+
+        Returns
+        -------
+        inside : bool
+            True if all values in out are in the specified range
+
+        """
+        try:
+            out = np.array(out)
+        except:
+            raise AttributeError('The given values should be a 1D array, but are not')
+        if out.ndim != 1:
+                raise AttributeError('The given values should be a 1D array, but are not')
+            
+        if minout > maxout:
+            raise ValueError('The lower and upper bounds do not form a valid range, they might be switched')
+        
         inside = False
         if (out > minout).all() and (out < maxout).all():
             inside = True
diff --git a/src/bayesvalidrox/surrogate_models/apoly_construction.py b/src/bayesvalidrox/surrogate_models/apoly_construction.py
index a7914c7de..40830fe8a 100644
--- a/src/bayesvalidrox/surrogate_models/apoly_construction.py
+++ b/src/bayesvalidrox/surrogate_models/apoly_construction.py
@@ -32,6 +32,8 @@ def apoly_construction(Data, degree):
         The coefficients of the univariate orthonormal polynomials.
 
     """
+    if Data.ndim !=1:
+        raise AttributeError('Data should be a 1D array')
 
     # Initialization
     dd = degree + 1
diff --git a/src/bayesvalidrox/surrogate_models/exp_designs.py b/src/bayesvalidrox/surrogate_models/exp_designs.py
index 2b602b070..602717cf7 100644
--- a/src/bayesvalidrox/surrogate_models/exp_designs.py
+++ b/src/bayesvalidrox/surrogate_models/exp_designs.py
@@ -7,6 +7,8 @@ import itertools
 import chaospy
 import scipy.stats as st
 from tqdm import tqdm
+import h5py
+import os
 
 from .apoly_construction import apoly_construction
 
@@ -259,9 +261,6 @@ class ExpDesigns:
                 if marginals.dist_type == None:
                     raise AssertionError('Not all marginals were provided priors')
                     break
-             #   if marginals.dist_type != None and marginals.parameters == None:
-             #       raise AssertionError('Some distributions do not have characteristic values')
-             #       break
             if np.array(marginals.input_data).shape[0] and (marginals.dist_type != None):
                 raise AssertionError('Both samples and distribution type are given. Please choose only one.')
                 break
@@ -286,6 +285,15 @@ class ExpDesigns:
         if self.input_data_given == 2:
             raise AssertionError('Distributions cannot be built as the priors have different types')
             
+    
+        # Get the bounds if input_data are directly defined by user:
+        if self.input_data_given:
+            for i in range(self.ndim):
+                low_bound = np.min(Inputs.Marginals[i].input_data)
+                up_bound = np.max(Inputs.Marginals[i].input_data)
+                Inputs.Marginals[i].parameters = [low_bound, up_bound]
+
+            
     # -------------------------------------------------------------------------
     def generate_samples(self, n_samples, sampling_method='random',
                          transform=False):
@@ -318,7 +326,7 @@ class ExpDesigns:
         return samples.T
 
     # -------------------------------------------------------------------------
-    def generate_ED(self, n_samples, sampling_method='random', transform=False,
+    def generate_ED(self, n_samples, transform=False,
                     max_pce_deg=None):
         """
         Generates experimental designs (training set) with the given method.
@@ -341,23 +349,20 @@ class ExpDesigns:
 
         """
         Inputs = self.InputObj
-        self.ndim = len(Inputs.Marginals)
         if not hasattr(self, 'n_init_samples'):
             self.n_init_samples = self.ndim + 1
         n_samples = int(n_samples)
 
-        # Get the bounds if input_data are directly defined by user:
-        if self.input_data_given:
-            for i in range(self.ndim):
-                low_bound = np.min(Inputs.Marginals[i].input_data)
-                up_bound = np.max(Inputs.Marginals[i].input_data)
-                Inputs.Marginals[i].parameters = [low_bound, up_bound]
-
         # Generate the samples based on requested method
-        self.raw_data, self.bound_tuples = self.init_param_space(max_pce_deg)
+        self.init_param_space(max_pce_deg)
 
+        sampling_method = self.sampling_method
         # Pass user-defined samples as ED
         if sampling_method == 'user':
+            if not hasattr(self, 'X'):
+                raise AttributeError('User-defined sampling cannot proceed as no samples provided. Please add them to this class as attribute X')
+            if not self.X.ndim == 2:
+                raise AttributeError('The provided samples shuld have 2 dimensions')
             samples = self.X
             self.n_samples = len(samples)
 
@@ -370,7 +375,7 @@ class ExpDesigns:
 
             elif sampling_method == 'PCM' or \
                     sampling_method == 'LSCM':
-                samples = self.pcm_sampler(max_pce_deg)
+                samples = self.pcm_sampler(n_samples, max_pce_deg)
 
             else:
                 # Create ExpDesign in the actual space using chaospy
@@ -398,6 +403,96 @@ class ExpDesigns:
                 return tr_samples, samples
         else:
             return samples
+        
+    
+    # -------------------------------------------------------------------------
+    def generate_training_data(self, Model, max_deg):
+        """
+        Prepares the experimental design either by reading from the prescribed
+        data or running simulations.
+
+        Parameters
+        ----------
+        Model : obj
+            Model object.
+
+        Raises
+        ------
+        Exception
+            If model simulations are not provided properly.
+
+        Returns
+        -------
+        ED_X_tr: array of shape (n_samples, n_params)
+            Training samples transformed by an isoprobabilistic transformation.
+        ED_Y: dict
+            Model simulations (target) for all outputs.
+        """
+        if self.ExpDesignFlag != 'sequential':
+            # Read ExpDesign (training and targets) from the provided hdf5
+            if self.hdf5_file is not None:
+
+                # Read hdf5 file
+                f = h5py.File(self.hdf5_file, 'r+')
+
+                # Read EDX and pass it to ExpDesign object
+                try:
+                    self.X = np.array(f["EDX/New_init_"])
+                except KeyError:
+                    self.X = np.array(f["EDX/init_"])
+
+                # Update number of initial samples
+                self.n_init_samples = self.X.shape[0]
+
+                # Read EDX and pass it to ExpDesign object
+                out_names = self.ModelObj.Output.names
+                self.Y = {}
+
+                # Extract x values
+                try:
+                    self.Y["x_values"] = dict()
+                    for varIdx, var in enumerate(out_names):
+                        x = np.array(f[f"x_values/{var}"])
+                        self.Y["x_values"][var] = x
+                except KeyError:
+                    self.Y["x_values"] = np.array(f["x_values"])
+
+                # Store the output
+                for varIdx, var in enumerate(out_names):
+                    try:
+                        y = np.array(f[f"EDY/{var}/New_init_"])
+                    except KeyError:
+                        y = np.array(f[f"EDY/{var}/init_"])
+                    self.Y[var] = y
+                f.close()
+            else:
+                # Check if an old hdf5 file exists: if yes, rename it
+                hdf5file = f'ExpDesign_{self.ModelObj.name}.hdf5'
+                if os.path.exists(hdf5file):
+                    os.rename(hdf5file, 'old_'+hdf5file)
+
+        # ---- Prepare X samples ----
+        ED_X, ED_X_tr = self.generate_ED(self.n_init_samples,
+                                              transform=True,
+                                              max_pce_deg=max_deg)
+        self.X = ED_X
+        self.collocationPoints = ED_X_tr
+        
+        # ---- Run simulations at X ----
+        if not hasattr(self, 'Y') or self.Y is None:
+            print('\n Now the forward model needs to be run!\n')
+            ED_Y, up_ED_X = Model.run_model_parallel(ED_X)
+            self.X = up_ED_X
+            self.Y = ED_Y
+        else:
+            # Check if a dict has been passed.
+            if type(self.Y) is dict:
+                self.Y = self.Y
+            else:
+                raise Exception('Please provide either a dictionary or a hdf5'
+                                'file to ExpDesign.hdf5_file argument.')
+        self.X = ED_X_tr
+        return self.X, self.Y
 
     # -------------------------------------------------------------------------
     def init_param_space(self, max_deg=None):
@@ -428,12 +523,12 @@ class ExpDesigns:
             self.par_names.append(Inputs.Marginals[parIdx].name)
 
         # Create a multivariate probability distribution
+        # TODO: change this to make max_deg obligatory? at least in some specific cases?
         if max_deg is not None:
             JDist, poly_types = self.build_polytypes(rosenblatt=rosenblatt_flag)
             self.JDist, self.poly_types = JDist, poly_types
 
         if self.input_data_given:
-
             self.MCSize = len(Inputs.Marginals[0].input_data)
             self.raw_data = np.zeros((ndim, self.MCSize))
 
@@ -450,17 +545,6 @@ class ExpDesigns:
             self.raw_data = chaospy.generate_samples(mc_size,
                                                      domain=self.JDist)
 
-        # Create orthogonal polynomial coefficients if necessary
-        if self.apce and max_deg is not None and Inputs.poly_coeffs_flag:
-            self.polycoeffs = {}
-            for parIdx in tqdm(range(ndim), ascii=True,
-                               desc="Computing orth. polynomial coeffs"):
-                poly_coeffs = apoly_construction(
-                    self.raw_data[parIdx],
-                    max_deg
-                    )
-                self.polycoeffs[f'p_{parIdx+1}'] = poly_coeffs
-
         # Extract moments
         for parIdx in range(ndim):
             mu = np.mean(self.raw_data[parIdx])
@@ -480,8 +564,6 @@ class ExpDesigns:
 
         self.bound_tuples = tuple(bound_tuples)
 
-        return self.raw_data, self.bound_tuples
-
     # -------------------------------------------------------------------------
     def build_polytypes(self, rosenblatt):
         """
@@ -593,7 +675,10 @@ class ExpDesigns:
         if None in all_dist_types:
             # Naive approach: Fit a gaussian kernel to the provided data
             Data = np.asarray(all_data)
-            orig_space_dist = st.gaussian_kde(Data)
+            try:
+                orig_space_dist = st.gaussian_kde(Data)
+            except:
+                raise ValueError('The samples provided to the Marginals should be 1D only')
             self.prior_space = orig_space_dist
         else:
             orig_space_dist = chaospy.J(*orig_joints)
@@ -605,7 +690,7 @@ class ExpDesigns:
         return orig_space_dist, poly_types
 
     # -------------------------------------------------------------------------
-    def random_sampler(self, n_samples):
+    def random_sampler(self, n_samples, max_deg = None):
         """
         Samples the given raw data randomly.
 
@@ -613,6 +698,11 @@ class ExpDesigns:
         ----------
         n_samples : int
             Number of requested samples.
+            
+        max_deg : int, optional
+            Maximum degree. The default is `None`.
+            This will be used to run init_param_space, if it has not been done
+            until now.
 
         Returns
         -------
@@ -620,6 +710,9 @@ class ExpDesigns:
             The sampling locations in the input space.
 
         """
+        if not hasattr(self, 'raw_data'):
+            self.init_param_space(max_deg)
+            
         samples = np.zeros((n_samples, self.ndim))
         sample_size = self.raw_data.shape[1]
 
@@ -655,15 +748,18 @@ class ExpDesigns:
         return samples
 
     # -------------------------------------------------------------------------
-    def pcm_sampler(self, max_deg):
+    def pcm_sampler(self, n_samples, max_deg):
         """
         Generates collocation points based on the root of the polynomial
         degrees.
 
         Parameters
         ----------
+        n_samples : int
+            Number of requested samples.
         max_deg : int
-            Maximum degree defined by user.
+            Maximum degree defined by user. Will also be used to run 
+            init_param_space if that has not been done beforehand.
 
         Returns
         -------
@@ -671,6 +767,9 @@ class ExpDesigns:
             Collocation points.
 
         """
+        
+        if not hasattr(self, 'raw_data'):
+            self.init_param_space(max_deg)
 
         raw_data = self.raw_data
 
@@ -681,8 +780,10 @@ class ExpDesigns:
                 result.append(math.factorial(self.ndim+d) //
                               (math.factorial(self.ndim) * math.factorial(d)))
             return np.array(result)
+        print(M_uptoMax(max_deg))
+        print(np.where(M_uptoMax(max_deg) > n_samples)[0])
 
-        guess_Deg = np.where(M_uptoMax(max_deg) > self.n_samples)[0][0]
+        guess_Deg = np.where(M_uptoMax(max_deg) > n_samples)[0][0]
 
         c_points = np.zeros((guess_Deg+1, self.ndim))
 
@@ -708,6 +809,7 @@ class ExpDesigns:
         sort_cpoints = np.empty((0, guess_Deg+1))
 
         for j in range(self.ndim):
+            print(index_CP[:, j])
             sort_cp = c_points[index_CP[:, j], j]
             sort_cpoints = np.vstack((sort_cpoints, sort_cp))
 
@@ -748,8 +850,17 @@ class ExpDesigns:
             Transformed samples.
 
         """
+        # Check for built JDist
         if not hasattr(self, 'JDist'):
             raise AttributeError('Call function init_param_space first to create JDist')
+            
+        # Check if X is 2d
+        if X.ndim != 2:
+            raise AttributeError('X should have two dimensions')
+            
+        # Check if size of X matches Marginals
+        if X.shape[1]!= self.ndim:
+            raise AttributeError('The second dimension of X should be the same size as the number of marginals in the InputObj')
         
         if self.InputObj.Rosenblatt:
             self.origJDist, _ = self.build_polytypes(False)
diff --git a/src/bayesvalidrox/surrogate_models/glexindex.py b/src/bayesvalidrox/surrogate_models/glexindex.py
index 6d9ba3c2f..90877331e 100644
--- a/src/bayesvalidrox/surrogate_models/glexindex.py
+++ b/src/bayesvalidrox/surrogate_models/glexindex.py
@@ -58,16 +58,8 @@ def glexindex(start, stop=None, dimensions=1, cross_truncation=1.,
     start, stop, _ = numpy.broadcast_arrays(start, stop, numpy.empty(dimensions))
 
     cross_truncation = cross_truncation*numpy.ones(2)
-    indices = _glexindex(start, stop, cross_truncation)
-    if indices.size:
-        indices = indices[glexsort(indices.T, graded=graded, reverse=reverse)]
-    return indices
-
-
-def _glexindex(start, stop, cross_truncation=1.):
-    """Backend for the glexindex function."""
-    # At the beginning the current list of indices just ranges over the
-    # last dimension.
+    
+    # Moved here from _glexindex
     bound = stop.max()
     dimensions = len(start)
     start = numpy.clip(start, a_min=0, a_max=None)
@@ -100,8 +92,21 @@ def _glexindex(start, stop, cross_truncation=1.):
         upper = cross_truncate(indices, stop-1, cross_truncation[1])
         indices = indices[lower ^ upper]
 
-    return numpy.array(indices, dtype=int).reshape(-1, dimensions)
-
+    indices = numpy.array(indices, dtype=int).reshape(-1, dimensions)
+    if indices.size:
+        # moved here from glexsort
+        keys = indices.T
+        keys_ = numpy.atleast_2d(keys)
+        if reverse:
+            keys_ = keys_[::-1]
+    
+        indices_sort = numpy.array(numpy.lexsort(keys_))
+        if graded:
+            indices_sort = indices_sort[numpy.argsort(
+                numpy.sum(keys_[:, indices_sort], axis=0))].T
+        
+        indices = indices[indices_sort]
+    return indices
 
 def cross_truncate(indices, bound, norm):
     r"""
@@ -154,57 +159,3 @@ def cross_truncate(indices, bound, norm):
     assert numpy.all(out[numpy.all(indices == 0, axis=-1)])
 
     return out
-
-
-def glexsort(
-    keys: numpy.typing.ArrayLike,
-    graded: bool = False,
-    reverse: bool = False,
-) -> numpy.ndarray:
-    """
-    Sort keys using graded lexicographical ordering.
-    Same as ``numpy.lexsort``, but also support graded and reverse
-    lexicographical ordering.
-    Args:
-        keys:
-            Values to sort.
-        graded:
-            Graded sorting, meaning the indices are always sorted by the index
-            sum. E.g. ``(2, 2, 2)`` has a sum of 6, and will therefore be
-            consider larger than both ``(3, 1, 1)`` and ``(1, 1, 3)``.
-        reverse:
-            Reverse lexicographical sorting meaning that ``(1, 3)`` is
-            considered smaller than ``(3, 1)``, instead of the opposite.
-    Returns:
-        Array of indices that sort the keys along the specified axis.
-    Examples:
-        >>> indices = numpy.array([[0, 0, 0, 1, 2, 1],
-        ...                        [1, 2, 0, 0, 0, 1]])
-        >>> indices[:, numpy.lexsort(indices)]
-        array([[0, 1, 2, 0, 1, 0],
-               [0, 0, 0, 1, 1, 2]])
-        >>> indices[:, numpoly.glexsort(indices)]
-        array([[0, 1, 2, 0, 1, 0],
-               [0, 0, 0, 1, 1, 2]])
-        >>> indices[:, numpoly.glexsort(indices, reverse=True)]
-        array([[0, 0, 0, 1, 1, 2],
-               [0, 1, 2, 0, 1, 0]])
-        >>> indices[:, numpoly.glexsort(indices, graded=True)]
-        array([[0, 1, 0, 2, 1, 0],
-               [0, 0, 1, 0, 1, 2]])
-        >>> indices[:, numpoly.glexsort(indices, graded=True, reverse=True)]
-        array([[0, 0, 1, 0, 1, 2],
-               [0, 1, 0, 2, 1, 0]])
-        >>> indices = numpy.array([4, 5, 6, 3, 2, 1])
-        >>> indices[numpoly.glexsort(indices)]
-        array([1, 2, 3, 4, 5, 6])
-    """
-    keys_ = numpy.atleast_2d(keys)
-    if reverse:
-        keys_ = keys_[::-1]
-
-    indices = numpy.array(numpy.lexsort(keys_))
-    if graded:
-        indices = indices[numpy.argsort(
-            numpy.sum(keys_[:, indices], axis=0))].T
-    return indices
diff --git a/src/bayesvalidrox/surrogate_models/surrogate_models.py b/src/bayesvalidrox/surrogate_models/surrogate_models.py
index c51ea8c66..fb579cf6a 100644
--- a/src/bayesvalidrox/surrogate_models/surrogate_models.py
+++ b/src/bayesvalidrox/surrogate_models/surrogate_models.py
@@ -24,6 +24,7 @@ from .reg_fast_ard import RegressionFastARD
 from .reg_fast_laplace import RegressionFastLaplace
 from .orthogonal_matching_pursuit import OrthogonalMatchingPursuit
 from .bayes_linear import VBLinearRegression, EBLinearRegression
+from .apoly_construction import apoly_construction
 warnings.filterwarnings("ignore")
 # Load the mplstyle
 plt.style.use(os.path.join(os.path.split(__file__)[0],
@@ -112,7 +113,15 @@ class MetaModel():
         self.pce_deg = pce_deg
         self.pce_q_norm = pce_q_norm
         self.dim_red_method = dim_red_method
-        self.verbose = False
+        self.verbose = verbose
+        
+    def check_init(self) -> None:
+        """
+        Checks all the parameter values given to the constructor against 
+        reference lists of what is implemented and can be run
+        """
+        None
+        
 
     # -------------------------------------------------------------------------
     def create_metamodel(self):
@@ -140,14 +149,14 @@ class MetaModel():
                 )
 
         elif self.ExpDesign.method == 'normal':
-            self.ExpDesignFlag = 'normal'
-            self.train_norm_design(Model, verbose=True)
+            self.train_norm_design(verbose=True)
 
         else:
             raise Exception("The method for experimental design you requested"
                             " has not been implemented yet.")
 
         # Zip the model run directories
+        # TODO: can move this elsewhere? Is this important?
         if self.ModelObj.link_type.lower() == 'pylink' and\
            self.ExpDesign.sampling_method.lower() != 'user':
             Model.zip_subdirs(Model.name, f'{Model.name}_')
@@ -176,6 +185,11 @@ class MetaModel():
         Model = self.ModelObj
         # Get the collocation points to run the forward model
         CollocationPoints, OutputDict = self.generate_ExpDesign(Model)
+        
+        # Generate polynomials
+        self.generate_polynomials(np.max(self.pce_deg))
+        self.ndim = self.ExpDesign.ndim
+
 
         # Initialize the nested dictionaries
         if self.meta_model_type.lower() == 'gpe':
@@ -204,8 +218,10 @@ class MetaModel():
             if deg not in np.fromiter(keys, dtype=float):
                 # Generate the polynomial basis indices
                 for qidx, q in enumerate(self.pce_q_norm):
-                    basis_indices = self.create_basis_indices(degree=deg,
-                                                              q_norm=q)
+                    basis_indices = glexindex(start=0, stop=deg+1,
+                                              dimensions=self.n_params,
+                                              cross_truncation=q,
+                                              reverse=False, graded=True)
                     self.allBasisIndices[str(deg)][str(q)] = basis_indices
 
         # Evaluate the univariate polynomials on ExpDesign
@@ -388,32 +404,6 @@ class MetaModel():
 
         return final_out_dict
 
-    # -------------------------------------------------------------------------
-    def create_basis_indices(self, degree, q_norm):
-        """
-        Creates set of selected multi-indices of multivariate polynomials for
-        certain parameter numbers, polynomial degree, hyperbolic (or q-norm)
-        truncation scheme.
-
-        Parameters
-        ----------
-        degree : int
-            Polynomial degree.
-        q_norm : float
-            hyperbolic (or q-norm) truncation.
-
-        Returns
-        -------
-        basis_indices : array of shape (n_terms, n_params)
-            Multi-indices of multivariate polynomials.
-
-        """
-        basis_indices = glexindex(start=0, stop=degree+1,
-                                  dimensions=self.n_params,
-                                  cross_truncation=q_norm,
-                                  reverse=False, graded=True)
-        return basis_indices
-
     # -------------------------------------------------------------------------
     def add_ExpDesign(self):
         """
@@ -425,7 +415,7 @@ class MetaModel():
 
         """
         self.ExpDesign = ExpDesigns(self.input_obj,
-                                    meta_Model=self.meta_model_type)
+                                    meta_Model_type=self.meta_model_type)
 
     # -------------------------------------------------------------------------
     def generate_ExpDesign(self, Model):
@@ -450,6 +440,10 @@ class MetaModel():
         ED_Y: dict
             Model simulations (target) for all outputs.
         """
+        # Create ExpDesign if not already there
+        if not hasattr(self, 'ExpDesign'):
+            self.add_ExpDesign()
+            
         ExpDesign = self.ExpDesign
         if self.ExpDesignFlag != 'sequential':
             # Read ExpDesign (training and targets) from the provided hdf5
@@ -496,13 +490,11 @@ class MetaModel():
 
         # ---- Prepare X samples ----
         ED_X, ED_X_tr = ExpDesign.generate_ED(ExpDesign.n_init_samples,
-                                              ExpDesign.sampling_method,
                                               transform=True,
                                               max_pce_deg=np.max(self.pce_deg))
         ExpDesign.X = ED_X
         ExpDesign.collocationPoints = ED_X_tr
-        self.bound_tuples = ExpDesign.bound_tuples
-
+        
         # ---- Run simulations at X ----
         if not hasattr(ExpDesign, 'Y') or ExpDesign.Y is None:
             print('\n Now the forward model needs to be run!\n')
@@ -545,7 +537,7 @@ class MetaModel():
 
         # Extract poly coeffs
         if self.ExpDesign.input_data_given or self.ExpDesign.apce:
-            apolycoeffs = self.ExpDesign.polycoeffs
+            apolycoeffs = self.polycoeffs
         else:
             apolycoeffs = None
 
@@ -980,7 +972,7 @@ class MetaModel():
         # only the trace is, to save memory)
         PsiM = np.dot(psi_sparse, M)
 
-        h = np.sum(np.multiply(PsiM, psi_sparse), axis=1, dtype=np.float128)
+        h = np.sum(np.multiply(PsiM, psi_sparse), axis=1, dtype=np.longdouble)#float128)
 
         # ------ Calculate Error Loocv for each measurement point ----
         # Residuals
@@ -1495,3 +1487,20 @@ class MetaModel():
             deg_array = np.array([deg_new])
 
         return deg_array
+
+    def generate_polynomials(self, max_deg=None):
+        # Check for ExpDesign
+        if not hasattr(self, 'ExpDesign'):
+            raise AttributeError('Generate or add ExpDesign before generating polynomials')
+            
+        ndim = self.ExpDesign.ndim
+        # Create orthogonal polynomial coefficients if necessary
+        if (self.meta_model_type.lower()!='gpe') and max_deg is not None:# and Inputs.poly_coeffs_flag:
+            self.polycoeffs = {}
+            for parIdx in tqdm(range(ndim), ascii=True,
+                               desc="Computing orth. polynomial coeffs"):
+                poly_coeffs = apoly_construction(
+                    self.ExpDesign.raw_data[parIdx],
+                    max_deg
+                    )
+                self.polycoeffs[f'p_{parIdx+1}'] = poly_coeffs
diff --git a/tests/test_ExpDesign.py b/tests/test_ExpDesign.py
index cec63db49..6a383cae6 100644
--- a/tests/test_ExpDesign.py
+++ b/tests/test_ExpDesign.py
@@ -2,18 +2,18 @@
 """
 Test the ExpDesigns class in bayesvalidrox.
 Tests are available for the following functions:
-    _check_ranges - x
-    fit_dist - x
+    _check_ranges       - x
+    fit_dist            - x
     
 Class ExpDesigns: 
-    check_valid_inputs - x
+    check_valid_inputs  - x
     generate_samples
     generate_ED
-    init_param_space
-    build_polytypes - x
+    init_param_space    - x
+    build_polytypes     - x
     random_sampler
     pcm_sampler
-    transform    
+    transform           - x
 
 @author: Rebecca Kohlhaas
 """
@@ -22,6 +22,7 @@ sys.path.append("src/")
 import pytest
 import numpy as np
 
+from bayesvalidrox.surrogate_models.inputs import Input
 import bayesvalidrox.surrogate_models.exp_designs as exp
 from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
 
@@ -80,8 +81,6 @@ if 0:
 
 #%% Test ExpDesign.check_valid_input
 
-from bayesvalidrox.surrogate_models.inputs import Input
-
 def test_check_valid_input_hasmarg() -> None:
     """
     Distribution not built if no marginals set
@@ -341,7 +340,7 @@ def test_build_polytypes_weibullerr() -> None:
 
 def test_build_polytypes_weibull() -> None:
     """
-    Build dist 'beta'
+    Build dist 'weibull'
     """
     inp = Input()
     inp.add_marginals()
@@ -373,58 +372,271 @@ def test_build_polytypes_rosenblatt() -> None:
     exp = ExpDesigns(inp)
     exp.build_polytypes(True)
     
+def test_build_polytypes_samples() -> None:
+    """
+    Build dist from samples
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.build_polytypes(False)
+    
+    
+def test_build_polytypes_samples2d() -> None:
+    """
+    Build dist from samples - samples too high dim
+    """
+    x = np.random.uniform(0,1,(2,1000))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    with pytest.raises(ValueError) as excinfo:
+        exp.build_polytypes(False)
+    assert str(excinfo.value) == 'The samples provided to the Marginals should be 1D only'
+    
 #%% Test ExpDesign.init_param_space
 
-def test_init_param_space_nomaxdeg() -> None:
+def test_init_param_space_nomaxdegsample() -> None:
     """
-    Init param space without max_deg
+    Init param space without max_deg for given samples
     """
-    x = np.random.uniform(0,1,(2,1000))
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp)
     exp.init_param_space()
+
+def test_init_param_space_nomaxdegdist() -> None:
+    """
+    Init param space without max_deg for given dist
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'expon'
+    inp.Marginals[0].parameters = [0.5,1,2,3]
+    exp = ExpDesigns(inp)
+    exp.init_param_space()
      
 def test_init_param_space_maxdeg() -> None:
     """
-    Init param space with max_deg
+    Init param space with max_deg for given samples
     """
-    x = np.random.uniform(0,1,(2,1000))
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp)
     exp.init_param_space(max_deg=2)
+    
+def test_init_param_space_maxdegdist() -> None:
+    """
+    Init param space with max_deg for given dist
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'expon'
+    inp.Marginals[0].parameters = [0.5,1,2,3]
+    exp = ExpDesigns(inp)
+    exp.init_param_space(max_deg=2)
      
     
 #%% Test ExpDesign.transform
     
-def test_transform() -> None:
+def test_transform_noparamspace() -> None:
     """
     Call transform without a built JDist
     """
-    x = np.random.uniform(0,1,(2,1000))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(2,1000))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp)
     with pytest.raises(AttributeError) as excinfo:
-        exp.transform(x)
+        exp.transform(y)
     assert str(excinfo.value) == 'Call function init_param_space first to create JDist'
-    
-def test_transform() -> None:
+      
+def test_transform_dimerrlow() -> None:
     """
-    Call transform without a built JDist
+    Call transform with too few dimensions
     """
-    x = np.random.uniform(0,1,(2,1000))
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp)
+    exp.init_param_space(max_deg=2)
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(x)
-    assert str(excinfo.value) == 'Call function init_param_space first to create JDist'
+    assert str(excinfo.value) == 'X should have two dimensions'
+          
+def test_transform_dimerrhigh() -> None:
+    """
+    Call transform with too many dimensions
+    """
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1,1,1000))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.init_param_space(max_deg=2)
+    with pytest.raises(AttributeError) as excinfo:
+        exp.transform(y)
+    assert str(excinfo.value) == 'X should have two dimensions'
+    
+def test_transform_dimerr0() -> None:
+    """
+    Call transform with wrong X.shape[0]
+    """
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(2,1000))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.init_param_space(max_deg=2)
+    with pytest.raises(AttributeError) as excinfo:
+        exp.transform(y)
+    assert str(excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj'
+   
+def test_transform_paramspace() -> None:
+    """
+    Transform successfully
+    """
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1000,1))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.init_param_space(max_deg=2)
+    exp.transform(y)
+    
+#%% Test ExpDesign.pcm_sampler
+
+def test_pcm_sampler_noinit() -> None:
+    """
+    Sample via pcm without init_param_space
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.pcm_sampler(4,2)
+    
+def test_pcm_sampler_lowdeg() -> None:
+    """
+    Sample via pcm with init_param_space and small max_deg
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.init_param_space()
+    exp.pcm_sampler(4,2)
+    
+def test_pcm_sampler_highdeg() -> None:
+    """
+    Sample via pcm with init_param_space and high max_deg
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.init_param_space()
+    exp.pcm_sampler(4,4)
+    
+    
+#%% Test ExpDesign.random_sampler
+
+def test_random_sampler() -> None:
+    """
+    Sample randomly, init_param_space implicitly
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    exp.random_sampler(4)
+    
+#%% Test ExpDesign.generate_samples
+
+def test_generate_samples() -> None:
+    """
+    Generate samples according to chosen scheme
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    samples = exp.generate_samples(4)
+
+
+#%% Test ExpDesign.generate_ED
+
+def test_generate_ED() -> None:
+    """
+    Generate ED as is
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp)
+    samples = exp.generate_ED(4)
+    
+
+def test_generate_ED_usernoX() -> None:
+    """
+    User-defined ED without samples
+    """
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp, sampling_method = 'user')
+    with pytest.raises(AttributeError) as excinfo:
+        samples = exp.generate_ED(4)
+    assert str(excinfo.value) == 'User-defined sampling cannot proceed as no samples provided. Please add them to this class as attribute X'
+
+def test_generate_ED_userXdimerr() -> None:
+    """
+    User-defined ED with wrong shape of samples
+    """
+    x = np.random.uniform(0,1,1000)
+    X = np.random.uniform(0,1,(2,1,1000))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp, sampling_method = 'user')
+    exp.X = X
+    with pytest.raises(AttributeError) as excinfo:
+        samples = exp.generate_ED(4)
+    assert str(excinfo.value) == 'The provided samples shuld have 2 dimensions'
+    
+
+def test_generate_ED_userX() -> None:
+    """
+    User-defined ED with correctly shaped samples
+    """
+    x = np.random.uniform(0,1,1000)
+    X = np.random.uniform(0,1,(3,1000))
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp, sampling_method = 'user')
+    exp.X = X
+    samples = exp.generate_ED(4)
 
 if __name__ == '__main__':
     None
diff --git a/tests/test_Input.py b/tests/test_Input.py
index 0923e0d96..84b9b239c 100644
--- a/tests/test_Input.py
+++ b/tests/test_Input.py
@@ -23,6 +23,3 @@ def test_addmarginals() -> None:
     inp = Input()
     inp.add_marginals()
     assert len(inp.Marginals) == 1
-
-if __name__ == '__main__':
-    None
diff --git a/tests/test_MetaModel.py b/tests/test_MetaModel.py
index 4ae14361a..d8bcfe0e1 100644
--- a/tests/test_MetaModel.py
+++ b/tests/test_MetaModel.py
@@ -3,9 +3,26 @@
 Test the MetaModel class in bayesvalidrox.
 Tests are available for the following functions
 Class MetaModel: 
-    create_metamodel
-    train_norm_design
+    create_metamodel *not used again
+    train_norm_design 
     update_pce_coeffs
+    create_basis_indices --removed, just redirects
+    add_ExpDesign                                   -x
+    generate_ExpDesign
+    univ_basis_vals
+    create_psi
+    fit
+    adaptive_regression
+    corr_loocv_error
+    pca_transformation
+    gaussian_process_emulator
+    eval_metamodel
+    create_model_error
+    eval_model_error
+    auto_vivification
+    copy_meta_model_opts
+    __select_degree
+    generate_polynomials
 
 @author: Rebecca Kohlhaas
 """
@@ -13,7 +30,83 @@ import sys
 sys.path.append("src/")
 import pytest
 
+from bayesvalidrox.surrogate_models.inputs import Input
+from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
 from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
+from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
 
+#%% Test MetaMod constructor on its own
+
+def test_metamod() -> None:
+    """
+    Build MetaModel without inputs
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mod = PL()
+    mm = MetaModel(inp, mod)
+
+
+#%% Test MetaMod.generate_polynomials
+
+def test_generate_polynomials_noExp() -> None:
+    """
+    Generate polynomials without ExpDesign
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mod = PL()
+    mm = MetaModel(inp, mod)
+    with pytest.raises(AttributeError) as excinfo:
+        mm.generate_polynomials()
+    assert str(excinfo.value) == 'Generate or add ExpDesign before generating polynomials'
+    
+def test_generate_polynomials_nodeg() -> None:
+    """
+    Generate polynomials without max_deg
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mod = PL()
+    mm = MetaModel(inp, mod)
+    with pytest.raises(AttributeError) as excinfo:
+        mm.generate_polynomials()
+    assert str(excinfo.value) == ''
+    
+
+def test_generate_polynomials_deg() -> None:
+    """
+    Generate polynomials with max_deg
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mod = PL()
+    mm = MetaModel(inp, mod)
+    mm.generate_polynomials(4)
+    
+    
+#%% Test MetaMod.add_ExpDesign
+
+def test_add_ExpDesign() -> None:
+    """
+    Generate ExpDesign in class
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mod = PL()
+    mm = MetaModel(inp, mod)
+    mm.add_ExpDesign()
+    
+    
 if __name__ == '__main__':
     None
diff --git a/tests/test_polyconst.py b/tests/test_polyconst.py
new file mode 100644
index 000000000..2dca4efe1
--- /dev/null
+++ b/tests/test_polyconst.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+"""
+Test the various classes and functions provided for constructing the polynomials.
+Tests are available for the following functions:
+    apoly_construction      - x
+    glexindex             
+    cross_truncate
+
+@author: Rebecca Kohlhaas
+"""
+import sys
+sys.path.append("src/")
+import pytest
+import numpy as np
+
+from bayesvalidrox.surrogate_models.apoly_construction import apoly_construction
+from bayesvalidrox.surrogate_models.glexindex import glexindex, cross_truncate
+
+#%% Test apoly_construction
+
+def test_apoly_construction_dimerr() -> None:
+    """
+    Cannot construct with wrong dim of data
+    """
+    data = np.random.uniform(0,1,(3,1000))
+    with pytest.raises(AttributeError) as excinfo:
+        apoly_construction(data, 3)
+    assert str(excinfo.value) == 'Data should be a 1D array'
+
+def test_apoly_construction() -> None:
+    """
+    Construct poly for aPC
+    """
+    data = np.random.uniform(0,1,1000)
+    apoly_construction(data, 3)
+
+def test_apoly_construction_deg0() -> None:
+    """
+    Construct poly for aPC for degree 0
+    """
+    data = np.random.uniform(0,1,1000)
+    apoly_construction(data, 0)
+    
+def test_apoly_construction_negdeg() -> None:
+    """
+    Construct poly for aPC for negative degree -- this works??
+    """
+    data = np.random.uniform(0,1,1000)
+    apoly_construction(data, -2)
+    
+#%% Test glexindex
+    
+def test_glexindex() -> None:
+    """
+    Create monomial exponent dict
+    """
+    glexindex(0)
+    
+#%% Test cross_truncate
+    
+def test_cross_truncate()-> None:
+    """
+    Truncate indices via Lp norm
+    """
+    #cross_truncate(np.array([0,1,2]), 2, 1)
+    None
+
+if __name__ == '__main__':
+    None
diff --git a/tests/test_pylink.py b/tests/test_pylink.py
new file mode 100644
index 000000000..ba99b186f
--- /dev/null
+++ b/tests/test_pylink.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+"""
+Test the PyLinkForwardModel class in bayesvalidrox.
+Tests are available for the following functions
+PyLinkForwardModel:
+    within_range *not used again in here            - x
+    read_observation *not used again in here
+    read_mc_reference *not used again in here
+    read_output *used only once
+    update_input_params *used only once
+    run_command *used only once
+    run_forwardmodel
+    run_model_parallel
+    _store_simulations *used only once
+    zip_subdirs *used in metamodel again
+OutputData:
+    constructor only
+
+@author: Rebecca Kohlhaas
+"""
+import sys
+sys.path.append("src/")
+import pytest
+
+from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
+
+#%% Test constructor
+
+def test_PL() -> None:
+    """
+    Build PyLinkForwardModel without inputs
+    """
+    pl = PL()
+
+
+#%% Test PyLink.within_range
+
+def test_within_range_noarray() -> None:
+    """
+    Value not an array
+    """
+    pl = PL()
+    with pytest.raises(AttributeError) as excinfo:
+        pl.within_range(1,2,3)
+    assert str(excinfo.value) == 'The given values should be a 1D array, but are not'
+
+def test_within_range_2d() -> None:
+    """
+    Value is a 2D array, not a 1D array
+    """
+    pl = PL()
+    with pytest.raises(AttributeError) as excinfo:
+        pl.within_range([[1],[2]],2,3)
+    assert str(excinfo.value) == 'The given values should be a 1D array, but are not'
+
+def test_within_range_err() -> None:
+    """
+    Value not in range
+    """
+    pl = PL()
+    assert pl.within_range([1],2,3) == False
+
+def test_within_range_switchbounds() -> None:
+    """
+    Switched min and max
+    """
+    pl = PL()
+    with pytest.raises(ValueError) as excinfo:
+        pl.within_range([1],4,3)
+    assert str(excinfo.value) == 'The lower and upper bounds do not form a valid range, they might be switched'
+
+def test_within_range() -> None:
+    """
+    Value in range
+    """
+    pl = PL()
+    assert pl.within_range([1],0,3) == True
+
+if __name__ == '__main__':
+    None
-- 
GitLab