From aaaebafdeccf1c05270412b5244af94bb141939d Mon Sep 17 00:00:00 2001
From: Rebecca Kohlhaas <rebecca.kohlhaas@iws.uni-stuttgart.de>
Date: Tue, 23 Apr 2024 15:16:22 +0000
Subject: [PATCH] Revert "Merge branch 'fix/BMC' into 'master'"

This reverts merge request !18
---
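A minimal sketch of how a merge revert like this one is typically produced (assumption only — the patch itself does not record the command; <merge-commit-sha> is a placeholder for the merge commit of MR !18, not the actual hash):

    # revert the merge commit, keeping parent 1 (master) as the mainline
    git revert -m 1 <merge-commit-sha>
    # export the resulting commit as an email patch like this one
    git format-patch -1 HEAD
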
 .coverage                                     |  Bin 53248 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.24388.XANLHOVx      |  Bin 53248 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.29708.XilfwCcx      |  Bin 49152 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.33796.XdyCYWmx      |  Bin 4096 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.43284.XuuJaTEx      |  Bin 32768 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.49024.XoHLhHmx      |  Bin 53248 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.50700.XPPOhAcx      |  Bin 24576 -> 0 bytes
 .coverage.DESKTOP-ATMEKSV.51388.XJOycQex      |  Bin 53248 -> 0 bytes
 .idea/.gitignore                              |    8 -
 .idea/bayesvalidrox.iml                       |   18 -
 .../inspectionProfiles/profiles_settings.xml  |    6 -
 .idea/misc.xml                                |    7 -
 .idea/modules.xml                             |    8 -
 .idea/other.xml                               |    6 -
 .idea/vcs.xml                                 |    6 -
 docs/diagrams/.$Structure_BayesInf.drawio.bkp |  908 -----------
 .../diagrams/.$Structure_BayesInf.drawio.dtmp |  964 ------------
 docs/diagrams/Structure_BayesInf.drawio       |  964 ------------
 examples/.coverage                            |  Bin 53248 -> 0 bytes
 .../example_analytical_function.py            |   66 +-
 .../example_model_comparison.py               |   12 +-
 src/bayesvalidrox.egg-info/SOURCES.txt        |    3 -
 .../bayes_inference/bayes_inference.py        | 1329 ++++++++---------
 .../bayes_inference/bayes_model_comparison.py |  440 +++---
 .../bayes_inference/discrepancy.py            |   16 +-
 src/bayesvalidrox/bayes_inference/mcmc.py     |  508 +++++--
 src/bayesvalidrox/pylink/pylink.py            |    2 +-
 .../surrogate_models/__init__.py              |    9 +-
 .../surrogate_models/desktop.ini              |    2 -
 src/bayesvalidrox/surrogate_models/engine.py  |  536 ++++---
 .../surrogate_models/exp_designs.py           |  141 +-
 .../surrogate_models/exploration.py           |    1 -
 .../surrogate_models/input_space.py           |  113 +-
 src/bayesvalidrox/surrogate_models/inputs.py  |   20 +-
 .../surrogate_models/reg_fast_ard.py          |    2 +-
 .../surrogate_models/surrogate_models.py      |  978 ++++++------
 tests/test_BayesInference.py                  | 1105 --------------
 tests/test_BayesModelComparison.py            |   28 -
 tests/test_Discrepancy.py                     |   18 +-
 tests/test_ExpDesign.py                       |   53 +-
 tests/test_Input.py                           |    6 +-
 tests/test_InputSpace.py                      |  242 ++-
 tests/test_MCMC.py                            |  223 ---
 tests/test_MetaModel.py                       |  844 +++++------
 tests/{test_Engine.py => test_engine.py}      |  768 +++++-----
 45 files changed, 2990 insertions(+), 7370 deletions(-)
 delete mode 100644 .coverage
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.24388.XANLHOVx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.29708.XilfwCcx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.33796.XdyCYWmx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.43284.XuuJaTEx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.49024.XoHLhHmx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.50700.XPPOhAcx
 delete mode 100644 .coverage.DESKTOP-ATMEKSV.51388.XJOycQex
 delete mode 100644 .idea/.gitignore
 delete mode 100644 .idea/bayesvalidrox.iml
 delete mode 100644 .idea/inspectionProfiles/profiles_settings.xml
 delete mode 100644 .idea/misc.xml
 delete mode 100644 .idea/modules.xml
 delete mode 100644 .idea/other.xml
 delete mode 100644 .idea/vcs.xml
 delete mode 100644 docs/diagrams/.$Structure_BayesInf.drawio.bkp
 delete mode 100644 docs/diagrams/.$Structure_BayesInf.drawio.dtmp
 delete mode 100644 docs/diagrams/Structure_BayesInf.drawio
 delete mode 100644 examples/.coverage
 delete mode 100644 src/bayesvalidrox/surrogate_models/desktop.ini
 delete mode 100644 tests/test_BayesInference.py
 delete mode 100644 tests/test_BayesModelComparison.py
 delete mode 100644 tests/test_MCMC.py
 rename tests/{test_Engine.py => test_engine.py} (56%)

diff --git a/.coverage b/.coverage
deleted file mode 100644
index 7bd62455cae2ec79af9cbc60f552afe43fca17d4..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53248
zcmeI5e{>Vaoxn%ZTGHy*$jC-E25W887#d?BydI&U*aQ=B2z3$&Ns5D5OKVx6UuY$;
zj|6*xK4`dHXwqv)lS1#GroFsga=o<SH4xm^>3g(EnqFUWd9PO>&RxR0OR&u)7pK_P
zeP>oHOR`NHY`s3(*=Nt}?9P0@-}%gUW_BdcdedEZ1QU|b9*u{^gs==bkxqvi1py(%
z!>=8FwZjA_oR)xCmvuhXDUa&g|J#nYAe-?KgtysS@FLsO_8Y9fv4#1Et;bCw?}Hoc
zU<4QeMt~9c>n5PwWiz?U%XRx7N{DSCDG?WaQd|w~YwljVdh=Rg^XgmgSSzU4gasA>
zjzx=v)j~YFM~F#rp*<LqgkZ!U^ofaJBp@U@BrSJRmi%O*gKIF=k&N}UJUR0Q{ZJ|)
z1wceB9t?}|E@7wCRYx<VwI!s^gn9vlNWnma<Pa95i!T)7Qo9tFB0fo0tymEBFSIN?
z&}}ifT`t`riPlMs`{2i#QyaK2UX|u{5af%-{lxmVxES$uNOGMZW@_k*f}NcSwYKqS
zcyS^sgn|)T&4?UK1fvl_>XdxRgyf&Jf^`I%b^yHW+JLO>q(a-(l4)I8bjAx$QZk)A
zUAP^Li$pc7;{}OoC`@gRB*Sfhd`B`IBaH(kRK<ztR3)b>n~0_>`zN=}ByBlf-<;;u
zkdqcwOC=-0T}dg^_;tc~$vRq%NVHBMl>iU;oYm-F;nE@SN~u%c6@uq2ub50kY1#{o
z?_COk?O?Il<o0-Uhjvj%A$YuP!GxSS;d7iOb9oBaA@`^D8`4X1orLtFP)LZj(=J0w
z3u@zlH)r>m#TpDTPCA+pO=kp4|E%6dj0GEnwotS!*Vm}W$P#oA7z?y)?a7FbbQPJ@
z!57^v#l?VBpT4UO3x94$0@qo}8{KO?Q@KvY1-wf$rxvB$XmVFn==Sr}jnb`^j!Zc&
zq_yNE!>D#EEQxY5t_>tt8zi+7<D;b3N78qoI*Uw3_v(tNs*}#=U79{ID<;n5E-lq5
zYL6h}68SC4sRX$)-Uad|f<N8-n3(7gCfZIegW<JBM)xhHQyET7@Gd2x@xWF+HTh6E
zr5}btpZpqfOeR;xP2M1NE!r)HU^Iz|!8mkT7=zUrKp+|YB4ifZqR9mHUy`KmN#>q$
zdsPABX^lPPT1c})_r5bK%R!h{$+RVZQM)SCc8D?=h2&bHEgB6;Vnm%AphU9_I)k!6
zZJ0PKHH^!UyNHay?pkV-Ho1^telv7Bt$)EuI4nl|>G_IG!>Uvm`$^{{delJ#3JSH*
zDP`!Qc``Mj>APga(&X32fC-Tr7X4~>CykONaZiUdL7X}pHHir+5e!QL$wG~xj<rXW
z1z%i(n=qFytTkNC4M=Nwmy(dIlFj5%PVwkW?i+8^DamwaRL2>w43m0T)XYvN^*K(G
zcXQldT42D1r=4s+A?&hnkff@C3~p)*wy6}fT<K>W%R;ii*WoJ&e%QeXFanGKBftnS
z0*nA7zz8q`i~u9R2rvSFc?1kPz0O3||9bom!UGVngArf^7y(9r5nu!u0Y-okU<4Qe
zMt~7u1g=j4HoajU*8ap}3Dy}Z-0438SYE%ZewhVorN<W$zKB1*J}bzqW&{`kMt~7u
z1Q-EEfDvE>7y(9r5nu!u0gZsoFi)p_3ZS<bD){t=0Ob4sY6C|2TlhA|kmDa5x7gpb
z2W(@uCv0`rpIJ9serMTb(edBrZ#KVb-f1?NzGYf!e9hQVG*a}nqQ%@f?oqDE@R8x4
z3|m16I~alMo51bW98${He6jT&88#oa#&<?LLLH(gw>Cw6$*>ej$gS&wiS^00);6(A
zl409b&>xR>wk9MQZjsO%>w?XAJ7Fu*#ww8InVzg@s0((($q5j*UI1~W(-OCF&Biq#
z>B}CFR5dM0&5>9#u_*@ovq4^SCCGD6UmjsLZ`#b(cE*|{8MbGGxR$vfu4Y=6H3enZ
z02YIN*j*s-z6ucdg=q=Al{n7Es9y@LiH2ihJSazrsk`QY<iKZ_EXRF$zDPaO8;qoP
zqpNqlVWQCs8}P<e?kES9_s@jNSd=(^JPLc?W!SggnwN&^`N}}ohC=A_LREtauNRcG
zxj{+uOeld(ukmOAw*9Hx<klc@co}pAW`mAR1<@gO#zN6JytjZlM4+^gioAZ>M@Ws@
zOF+?GGf|^_&-3T>9j^=YtSN+^urKU`itLyL3TDKg^DB~<23@{l&^06eoF}_A+n=4F
zq%i(0MFOzJA1V^TphKLAR+-Re#28FpFyz4e6;IOL_MorB0s8JKh(47h*$OYlLDL>P
zXp#$}DH>07L<7-?81lkvvwR&e=6PeuxSR}<nL1<x#Saujaa;;`+eH{`Lt-qX&b}Si
zf&tBF6XSlUSI`2Q?kz~YBzThY!d`H1JP8}NV_gyts#*%7DiD%7>8m$DO~4Fl3Nx6~
z3BxO=ACW>PP*fO~$nSPgEx!@8Z7c}AtkLPdBG7UT&q5QQSky=6*36UhS2&QoVkYcQ
zOU|--lL2JkI1{q7gx;+Op$k8|(5z*{t>m$=S$k+Bji>{e*U*Skrx=cfB)Js^URdM@
z$Qy~gs-pZWpoM(@Uu|$A{FLJ|zry@l(ZgoW95M!rMjhXDthB#n(OcsDaoZx(Q>L42
z$80I%>z4np{@i+xy#uc+TEd;@j&f@a9~+)Ew1N<JFap;(feoBeuA5dz&WvK!U*_Qb
zAJdbSwf?Va=9JRuiA%5lg%(b!nx3Sb^}pwKPI1qGJQ$8Dr_U^V{Xg$cPN|umWx4DB
zx%YF*7p5nW4)hb({}pjg30#BZiL+_`f<jGCvi_g5lT+@W5tVuC{YlpU<$g}tP!L^N
z({P!OQ<`T)N&Z9C`1;@7!6};xqa%0yKf9ocvey5l0ZzGVMrxGrdAaNVlI@(brXYIK
z>wnjb`E!0n^3pI+&6+WP&Xb+J{x2?^KWEne&M2pdGtw#(`pl&3e;nkLdkUjZC7D_O
zJ32W<E{vwDum9~~PI;g(iZd(n@%6v0a6mKK)b+o02dCUyn0n=}|1IsD(oz^z+3SD4
zaAR!l`rlkQm&osSFgTecPT5!(dReor@fx3nCO)y`um6j-a>|Msv47(FpWDnSH_nLc
zEEI-KoU-s5gkEj^uZJ0CBmdc#51(!Q&))xcoiA9K`HTP~zz8q`i~u9R2rvSS03*N%
zFanIg)d?7M24pAS|LgDwf**D;0*nA7zz8q`i~u9R2rvSS03*N%FanIgUmyX4!OqkD
z|E8A^9>!tp$E}V5$NP>d`$hY2?f+^|+E?0bwqMwuvwq!r*z#k`1D4w@CjL#nk3Yqi
zntRPZGJnq;HT%sj)1c`B2w?{!zz8q`i~u9R2o#z?O*N<E*TV}-25&of^5la0>TRv9
zt*1_&ym?oDe_NBYs``d%)Z?gvOqEsKgEiagX4h1gR0)u7f>()*jIKL%{`5di&nGXv
zIM9HE*$1e-yLOpdJViQQsq}Dzk8}>blIr*LH6UlIYV2#>L#|Z6JJs(UIdWpy-7vIx
zADs3r-UpYRXNF&alLNzx$Bv|+jCTa`of&qg`k*{t2|7zExswMC%jdyq3|{hauGqb9
zq<`@k$=Q9_J7VpA=h5LKsXkBtq2X5=;A+c=3uJZ=9q|l-%rn-xP_S$+7isD{V&W@^
zKzJ3$;^Iwnh_N@!8EjaFdfxqYs+iP5fM0DlqE1bc%j7VsQTleyO|1mdDf7!g>i+fR
zgQ<38rvJ$~T}%_um`RiW^?a2hxA0UM^K)gV!A9-SA)-y6PWBkf0E45f1G^y>+=n6O
zMSUMaBTHt(g|^v;%Mr3c?nbRVjdKi63pcJVB^dRkhr3<qJ6|$n6&M*!oj8lo$*<~A
zY*Y6S-+jx1ZfLpuUIT}EkH7hYPi?m&RM)U0@N#K!2`HUkvP3QX#;1BT=Ux|)TJIW6
znb8}^LFju={sJ1{8;@TpLW4^^aKVY%3HA#Y%Py~UIp5&!y=_&h{$XO`n3qROe)?QH
zC-5ky-w@}~ooN0F9&Il|Jx7(6Ss?j9<*dO}_mhx$&yH3O^;}s7r`mDhOe)ooI`Qby
zcaI-@?N@*I)0-|=>V$gRf2}yWeVEr}+(VDv7lwzpn|cmiu8GZEhk)?c@2f)S_`VQo
zq^O8}uO3|~A7A-6H`u?-h0v-+UEiJm>59Mq9C|7+f8%`|I@VXETWLhfuX)tF;I<P-
z-*v?wytO{7;XtY}bz<)ilYH-kFKwym(V9>*7B!lRfu6JYZQcF~Csgg=-#GgQQY)!T
zf;04xjQ{ye#%gHN9x6WLvl&TKpERa#Y4Lc@3Fc&OElHoLaaPHxi)6}9a+2-WT31pl
zM>1<NDfYb5Jv7agV!%{~*F);qlOLQqYkR45@Y#BIlY=;#*)g_Muh&yKX%{vM!nj-_
z0+RH9v#a+nG?FAe^vGOq2jSc7<`x@Vf%nzGk==$QZ63E4IhWfYf6R7sT9?g9+GjH3
zNy)53SZRfVe`__Tw9IAcyS7GhGD)MFRk=NIx2MtqrB_*i<uQ`?1usJ1MCk2RU@KhP
zvcSc|{RO<aoyh8a0&XAp_KVBS#8&g+W$5O|QQ6s3|8jXb6!>WFo_VkS?+-Zi#{YbH
z>B{KnSZl+QXU;zX4*$W~TeiM_@pSFo${G{tYDUxHRCnq%B9a`vpPWHu+t%Lnj;D42
zI?do?uX009|L2=gBQ#<+vi<?O7&&wT>V51R<O~k2wrUDO0io^dXI-q#r>tejb(eIi
zSGx<Pd!7z7U*eCQQ(W!)esZ_!KRxt*?}b_dr)RZ=`~JUq%|;_s-(l?7SwtLVOVQ!6
zT0L6wyQ~4D_mfoWiVO9gc|P^COBG-dv6i+(BNa%3?U}R8NxH3aEQLD1;B+Fhkt0gE
z4wC_5r{M*!V%CHD{Z4pW0NR%-uC8Cz-_SpL>EkQT)S2P#A(&&oG&E4tG_rTk(7^Aj
z#!fF9JH2&e@ACc+-x@gb;@;8vbC*8eJGA?`^CYp)Iq=EL`?~u_&;9#=hXPb>aGoEy
z@`r)c(CEw0rN%r%mmV6rH0m5UcVy2%&AHRAfpY>4%|;!^^UmjWbpJmtLHG*(2!Dv*
z!{_na_&4~M_~-Z({t^CX{0#mMeiA>1kKlv&A-o$$aR(OhSMVl$2VRR;;pKP<UWlu4
z1qfjWBftnS0*nA7zz8q`i~u9R2rvSS03&c65g?xx=&Gw}R8>VIL7<VxL!-({8qJ$W
zqq%cwR8c{rIdf=KUQVO3G8(zvG@3n|Mx~`RDk-6n%SEGEvuIRYOe3e0Mi|q`;h>S-
zP9vL*Mpi40EEXE^JdMoop#vCiGSSFLKlacS710#O(a2z+kzUWiCjj*O|H(dZx{jKQ
znaT(-0*nA7zz8q`i~u9R2rvSS03*N%Fam#s0Qvr(t^fZBBzBt-U<4QeMt~7u1Q-EE
zfDvE>7y(9r5xC9?(Ea~-3;Fy1|HDK01N=Vj$G?X?0Kdhr<FmLApTR$YeE`qnXYqIO
zKjNqH6R;QH5quaQ!256)PQZSE9azHK@m3JR4n}|xU<4QeMt~7u1Q-EEfDvE>7y(A$
zuZn=78eXk$s3L(t0uKo)NidHDb4gG^f;l88CqWqr+$5Myf>IKckibQPStKYXfs+K7
m1P&6|Nnj&^l>`<N@FXykz(fKg35rO-k-$I#JqgJB|NkH7vuQ#A

diff --git a/.coverage.DESKTOP-ATMEKSV.24388.XANLHOVx b/.coverage.DESKTOP-ATMEKSV.24388.XANLHOVx
deleted file mode 100644
index e1f7e6f75f5aa9a7a33884193b67b7cc22082200..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53248
zcmeI)O>f&a7zc34ZmrmHpcX+O1VIsAHpi&r&d8zIcG$8lhhhb~Zow`C=F&2qXtg9q
zlJjyH2DbwO4A?gq_MP?-b}M$-X{TNGJd$YHNnG?+)cXsuElVUt{`iozqqN^Vee5Ml
z48q8liP$sl7^Z1_DuiJeReG$^LvI%C+|qC8)tuR1v|BaWgTL17Z;g88SHu2(?TNix
z|84F4)j#Tf^_SJ(Em?Kx1U3jj00I#Be+x{W)-7jq(|qw`B6~xXMAB7J_PqA_*9ZHD
z2jX!5v&RP_J0^B&g0|gVu`i<VOpH_{2HsE!FX(%&OuXPoB*#jp9>=QB3mqM!rH&J}
z)BL>G_4<@6QAbq7DDr$6U5MxEqLn7dua>CuBs)NbDDNoX6k;dO-V~7<s7M8_inChm
zc>QLr`SPcl<*cupmnv<Oj9mIzoofvpD4OzUKn1yB)aTmwA{n^HDsBlmZJ`@dJ<pS@
zwNdClNJ266g0z`I>?K|p2z9R9aiaP+Hn7ELdLz8q)=*i`xIxddocXn8bc)P3<jj-j
znFmz4Ak?fyM!qy<PU{?u{T`A3F!o2>Ig}&2a1t)cxhQWE=8b*hzPX_<i}ucSrzYnf
z&2o(c?_{i|o!=5g&Q{utAZ!V41VzAms}<+Nb<?1zROj)@kUnp9<v0n`_g(7zZik-g
zKi^%kob7G%@+1uu=IQplB%bb6=YnQ>ctPm!`NeU=qhvlvcoYqXA{?YchI5Oob10f;
z$IJtbAtN4WBFqy~P2Xy?ab>9vqBjhC^RbpjOsr^t&|ILC4aR}XLnY=mxZ$aa<dJIU
zXR~SHZ3hyCPN!ON4z?GC&NKwNo#}3EvRSd5tu6CKH4US@xAGTjE`)SX-Y|{oQ@)b%
zIMNfz>n2H^qnIW27|G92b9SwYv%j^dIeEHnC*N6_Shp-^W5b+eBZB88e%zjG1Rp7e
zz?FsI&8t6>$+5UzcRoxNZ``gpA8#zGI9H+D;pfWB?@DQvFE>+tpC)~Nl;^6<M~W(c
zsV;U;<&b8Rk@O-OvNQ*0D}dmHeMyOBFB~Un{PNp0JbCRY>YEi%yw~=Ws+lII;r&$@
z#~v-KylllE>7$}?EMuO9;)dvj;ZVsSTN^0HqnNJg#Uic4^;PMUqKx>0%Q!l1q;=AZ
z3s3WhH0bpBq8;CtK|f!wcp1)0rMaI6CtoL<L@1+Z(4dTIh+ZjE7n+~t8B3Sn9+3%`
z>dSsM+__U`N<2GO*NV$lqc3El63<tHlcbeN1M5u2!i^N2q_wo!D8DWYxVO3;e&%n<
zEBsMzvTa(<{rl!*oDarqp6SN4sQXe^JAYf63z91*i+FiL6E1z)nT->+o8jQM*%f$l
z%X+YvDd<%BXPsJ;zu=qpzXttag8&2|009U<00Izz00bZa0SG`~`2@;l$+Y<Uf64yS
zu>YnfY!H9|1Rwwb2tWV=5P$##AOHafyqf~`Qu&^(|HR`1+bnN6`5ys1Z11)AYSgNd
z{mQUk+5fzo4MeRW009U<00Izz00bZa0SG_<0uayw_3}Ma{}!NBD{ocvUjn56!2ga;
zg;xkb00Izz00bZa0SG_<0uX=z1eQ^N@Bh<(`iBhy5P$##AOHafKmY;|fB*y_0D&bI
z!2SOv?p!nt0uX=z1Rwwb2tWV=5P$##Adm~-{y#zi0uX=z1Rwwb2tWV=5P$##Ah7%b
zxc|TWeT)`D00Izz00bZa0SG_<0uX=z1aSW!F#rJwKmY;|fB*y_009U<00IzLegWM7
zU;aKu3n2gj2tWV=5P$##AOHafKmY=`|Bo1e00bZa0SG_<0uX=z1Rwwb2rRz<?*A`;
aAESj3fB*y_009U<00Izz00bZaf&Ty)+a0$6

diff --git a/.coverage.DESKTOP-ATMEKSV.29708.XilfwCcx b/.coverage.DESKTOP-ATMEKSV.29708.XilfwCcx
deleted file mode 100644
index c54e53aa1aa202b1377d33fea299950cda87d2bb..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 49152
zcmeI%%Wfk@6o6rSE_UK1nn6)yjj~8>z_BLD$YKR7AV3x)Mj9>=8zeksyPPz*yPfWK
zE-MrUDH6|scqd+ir{J8v_!=X2Bdp(wo$l(Y>Z-3!RoUwAzIo*(&h*2`w~6_%^t4ni
zmp(J5R4P^Ey(#bFrRR@|6Zv1BI$m~IDIN9y-g@$VX`^aOPk!8b{p4`-kF5_j{@V1{
zf8Y4CYS+8Vijs~10tg_0!2ej_?Ju=zeS5on=Mvi)I2(24Z{v$^PmWJd%<1vxuTG34
znEjfO*Wsag**B5vhEdN8&P*q=LHFFn2gaHxyfQ-<nQj;)?m9`2`%&n>NJ2C40%sD*
z9D9iu2F6{x?kI7+qqNq(H*jsQXFMr*=Ar|=Q}L2J%HHq1v%vi9Zj99RMzg=kO6K>v
zx9~kB^Xz$HZCnuEdsppf+U8*7ccf20jr^hO87-0BI0=`POp)#>n}m61r+TxW_q-{j
zeHXc)>tds6X5Z^I4>FwT{FDC96e-9jrAC2wF_LQX8ypm)B2ouwH-hj$FoUpJYrelx
zseiOxE_p%EUB?##`M%Y*qa;kv+cNm=mV9jf_TE~xzPDGtyGR2?yV&k{NjyGSp9z|A
z@{-We>&xRt4Q7KxM&x)fFk#;$=Wcp%L@bOXQNA8(Z(kG`HgYnyCX~q2QIwXx(`d`v
zU^;`aGZWzw>C(5@$pn#(71{cupsQ0Q);@H@OBdNQca&dU42*XXND?}&^-BF@Z&~P!
zL!jLnAJ%T4RjT#f-SV&NX&B|BmH(^GgitY(<F)5_VUh8jjYm<jkWAJPGmJdnMmM@>
zWb>$~OxLC?rI;i673t1lwNgLcUDlmEU%Qnbtld^0RqH!D<=bpV=oskx@k}Q)WikaO
zJxJ64d-I1jIX8=K*JNpU<8h__$<DHd>#f=?{j9wG;X&HuyJxAsFN?muAI>ycQzlLR
z%H14Z+JUSlL+eE{Wn~S{b^xOpdsZ^rPB==^_|?-iJk!;BY@JoG*nY{?OtZ@r_&SVZ
zPqrW(v-sm8)ilm+tgBGmFr6@z9V*E71}X6}mV0`!N!zfvD}6etBfil(&Mq6NVX?XB
z#5<KqSIjRt@O>Nf^8HGel&n@+`*m{aeX>PF3YvyY%2=l8T%ARw`Bh!9iu#X+vV&=@
zzU^hxT?b{V#;bF;P+hh@d}$Mxc)l~5C2dR^Kvy<4-N?yB*-M*^hyNA^I$G_Pe(JB}
zHT}+VyH~E(pFc0(j`GQvtuyUdHg(??&CX9B%mm5Y#Yw!pmIYV73rxp}+D&ojX?6!)
z+_DkuWC}&D{Hs8%xuwl~svo=%KmY**5I_I{1Q0*~0R#|0U{wY9{lBUsOTQ36009IL
zKmY**5I_I{1Q3`A)bx<&{}T|75I_I{1Q0*~0R#|0009ILSU~}v|F7Vn(j^2CKmY**
z5I_I{1Q0*~0R-j*c>X^J!YKj>Ab<b@2q1s}0tg_000Jv0!1MnV98|i500IagfB*sr
zAb<b@2q1vKoB+@N=Ri0`009ILKmY**5I_I{1Q0-A1qFEizk-8Gmk>Yz0R#|0009IL
zKmY**5SSC-`TraUrwAZ`00IagfB*srAb<b@2&|w0&;M6&Q0Wo^2q1s}0tg_000Iag
ifB*t>0zChp1K|__1Q0*~0R#|0009ILKmdUi6!-_d(ZUx1

diff --git a/.coverage.DESKTOP-ATMEKSV.33796.XdyCYWmx b/.coverage.DESKTOP-ATMEKSV.33796.XdyCYWmx
deleted file mode 100644
index 81e53f831236a6e47a886ccb6403982ccf33b461..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 4096
zcmeIuJqyAx5C-5x1VIqp9DPSc>*62qgKmQOF)MAowgGJ*jXL{x{S}@{7Z-QABS&~i
za)D>NUK&qGtvg2^6V*{#t0|zA(%d!fs$RKX1>RrzS7|j)A1%J6+k3@JN(4ke1Vlgt
zL_h>YKm<fU1Vlgt{!rlF(~a(MsP7ja#V1Pam0Xn3p-gf*M_;kF*o3oPh}~?y4Doe{
yeiK|lfK@?o_BMjIa6Hl-;d25{E{!c<(|7#*08?->C64j<{l2<o3Qrli=HLkvOg+s2

diff --git a/.coverage.DESKTOP-ATMEKSV.43284.XuuJaTEx b/.coverage.DESKTOP-ATMEKSV.43284.XuuJaTEx
deleted file mode 100644
index f723535d998644947972f91fa6e1bdbfa96b41ff..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 32768
zcmeI%%TC)s6o6ryOH2b1Y*2()bXE-!4HD|EU9@S-qAC)-pl&KPn#7aDYHSDF0kTZH
z=u7mac#S?qXM70(T11wS{zy*jnd9R#-;5(!{`~RSOO)t_kuMYR%-XVS+j=F0WmzkF
zThm+dF~?>rK6bHl&+m$L(EU@{{9-xlSJvjY%E@M9<9FrB`qhSi&slUKfB*srAb<b@
z2q1vK!vrQfC8xT(YyWtZ$o4>W!XQzXN&e%!`E=AgJrbwQ*T+X9ZxnlFp+Aj=Xo@Jj
z5JMGByL=_%QKa01G{kd-r_=Rxz%cTB8I8qvHGZ1*70JxivX)WcosE>(%X{kOdc!GI
zo4dB<1+Kb`&j$L*qa{a4m>#$CxYl95vo>*7oa*+rJvmE<()9+)Y^yg0VOst*g0%hN
z<qu`j7k6tn&ARZ~YN`5S`+tR-OSKNocj@QXrL^Q<J88+jN~GDAZ<K6Wu1kKe#*K41
z&{9L`Me$RSgd)=O7P-O%x>5(rb~s8z(pSP9rrsKUrrs>U-THM@y_nq(VH|sUNtii{
zpBJs7)|YWhMxD4O+F>|QGBB$lT@UYKeN8V;FPhin1wGM@WYFoW_~mtucx-a?&TDB+
zS%NIbsV=$`o~uaq%#QENz!d{8P&&1K;><>>-09%5m8KV?sOghpFAQ$wGz--v&3i@I
zo#sCnY7>*ym#)dLpKJA`%w)Xit2^nkGT+KXC7!Q@37MWME@Uh^k<y*|7O2;j?^_0D
zwpxegd-~)&d2Bn?{e62f%5TQZp%rUBUluI+;nIyoa;x)ty_{$|eVfj%6Eo5o4s)1Y
zLEo-=EM^AVnL-hj-^S&7#r&&mJ?yWAC<G8d009ILKmY**5I_I{1Q1wE0iORCbKa;L
z0R#|0009ILKmY**5I_Kdc?8NPnD_tlkP(Xj0tg_000IagfB*srAb`Na3Gn`Z;iim&
z5kLR|1Q0*~0R#|0009ILm|KAN|8tXZ0s;sifB*srAb<b@2q1s}0t+X=`~QWTG73fj
u0R#|0009ILKmY**5I|sV0p9=5O~wfbAb<b@2q1s}0tg_000IasoWNf-a#qd&

diff --git a/.coverage.DESKTOP-ATMEKSV.49024.XoHLhHmx b/.coverage.DESKTOP-ATMEKSV.49024.XoHLhHmx
deleted file mode 100644
index 8c3bd15ceb242ea9c40c1b4be42b077d3d2db13b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53248
zcmeI%TW=dh6u@!27duy_D2l8o51AKmtR|@<o`44kkcWy&r4aFgggds!$rkT!vb)aZ
z0im==k&yTXi0{PL;8SqUUVKU7s&AFHf2(%ayEC(MesgBV(f;`2nU^@z4<p|u=F`ff
zO0`<~%9u)}QkVC#yo;B9e^7jp|Em+nvkvQ(-Tr^eD?e41YJXK$ep!CLva|H}@+XV`
zF8T9+F8))u^If^X3jqWWK;Xj`xO%ZvZ>+CZU;mca&cG#+?Yb!YT>j>VgZ;w;bGZNY
zvjdZzGg}KrUOPKx-$dcLIdPHcdjn^@pyzdM;sr-0Id(<qVeERk(aAa4>a^f)T3?Ua
zUQcQz?ns(Ai9Fv%m*!V@xt$is@0PfWBs(FEIPWOX5@su}-ZGKvyT}Dy7iYcL@_Ma>
z)|=lK>W$Ua>a|N_vQbz5Elx!vC&r;X?Mp-5FzRXVJCO~#$1dJBb`)VZlzv_$S+r5;
zKS@F}@PahVAodb342-*Q-C^Q-cLunvXvItLCZmzIUTQ!uvzqz6CUnM?@2Hs<&nx$(
zb3s_Jjw|Y|rE=QmVCZ**{MFDu(ZP`#*^QHMR?S&;lQ0kVoyX>mu^h)cHJk;xj%ZeE
z7<i{cHyZqHGp@Ovh7p9@MuU(H_;|6__-wUWk*stV@##Q*Z?)|(3DfUw8T|I1d@TL(
z(OkW;xmmqFO%p|V+8r;6M+fs$Ni#Y<BX#up?7Y!gGMywkiv|M|_R}e&wM{lSlFgHI
z=1GBJBb{g_%nM3S-)pvMXQdCOGYC7=xt3;3>|}z-T2K`04}-2wl~`lwhG#CaM{YO2
znk@_OJCP)H_U3DigUwl~GfIK>-so`QYQ0u(Y;06t&!=gWk5>L)eJX{Dk-TFWEiU=a
z#>1#sNZz(c7B$AJWHCqbD-zC5z1G;@m=#W5uDzEZ%w5$V)EjGS)vIhq=(?ouhf_i5
znei03u@Ss`_fKqcY;N~mPfN#}4{MDt)@F5FyU^a#&)S=x@1<S7UQhLXS@iXNf2zxR
zX58iP+~v-h9mr~OV!cSFtgOM=4q&uk&q`t235Q9Vzxp*zPu+XQ{mmK}e=qE%R4Xkm
z)BF1{jy>5{b=!(RFV33gv5j>Vikqerh687VY;TYnPh+{K7n`&Xw|Av4$8E%y+Q!jY
zGwoBcx#%)~D3h+3Uvl94Ht6O1m2SgXtFrd%<kb6Qi-=S-O_`LjOwk){7LDdtb;T;$
z-#rl%ZPmBEY`W{9Otg4@>~1xe?MC0)#3i2Zj21~dlP1==jZHUla#8luR`dSb(m+S6
zy{Di0SMr?xl)Kuj)*Fu>SFeWoWX#r?b}XB^Z;Ni{U++yN$&HKSe0eSluKe1W%oDYn
z;Lxwx9dvQaMzE796s7XtIt#7z5Bx_T_RmhDM*sl?5I_I{1Q0*~0R#|0pzH$t{a^MG
zCO!lZKmY**5I_I{1Q0*~0R$?Q1x4WbKdFEK0tg_000IagfB*srAb>!r1$h2n>ft3g
z1Q0*~0R#|0009ILKmY**QURX-lL!bPfB*srAb<b@2q1s}0tl2}fam|^A7dg!009IL
zKmY**5I_I{1Q0-g=l|pY0tg_000IagfB*srAb<b@<rm=jfBDCl2oXR40R#|0009IL
zKmY**5a9VgIe-8H2q1s}0tg_000IagfI#^Lc>Z7hF(yI;5I_I{1Q0*~0R#|0009Je
y{!b1dfB*srAb<b@2q1s}0tg^begU5Umw$|j5CH@bKmY**5I_I{1Q0*~f&T&Oss~K~

diff --git a/.coverage.DESKTOP-ATMEKSV.50700.XPPOhAcx b/.coverage.DESKTOP-ATMEKSV.50700.XPPOhAcx
deleted file mode 100644
index fad3080e3e429bf14717ae3d448a946735a7c872..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 24576
zcmeI%%Wm306b4{B<WixaWD}vTdRBE&LZqm>c9Etni>i^*OVmxJq5%&uDz?eY0AY!`
z=u6}Y`xHIL<`$y#0qAeZW^Npx`DVPZ{Ppvt@tQiBO_ZmX!C?@F!5g9=2ughJ@;N`G
zzCEw-Z@5x^tXK+8JAWz%--4a}yWrqQ<?5ime^+__*l@H%00Izz00bZa0SG_<0{^SP
zbZ>jFdUO;{Z@g;tb;lIv&bu#-i|YnmU%b6+khIc^GVxKb(*@aVOapC;_CzZ;v^qY`
z`%O$H^}5Tw18WjxC-hTKPV&Bi@;&nU))&=bYHo%cSDI_(TC}rUeSH)LCXMy2yXo^Q
zQ&SCnme-q-Li0?nyT86~<thJunx~v-ujDM<OIcc$DSy<H`bhQpZUbekJ0YKu)$Pp2
zBm-l`gH<aVdh$I@Qq2?Q%vbU)0zAy0N7d#wpEBnRJIbWpt9dI$J>{Cpwp~Q6EbD8P
zie;|(!SOwf%Wpc%$)sIsS(UbX+MO?BxQWE*j-ouLB0v%2nv-s4BW+bz3Q3~UnEEEw
z9Gc%ai<#<pF}Y%?cqu93KRJ`7YcXY+CFrb|vd$v@>3~fnYNBF^&+oOoC@T@iJ^dhD
zk>-2lwKs_-8M62*jg_Oe)!fN@s1}v}wGCvonrCvAx8x`C)J~s;d)4FPa5~Jl!`z|i
zm@iRl)vbG*WUX_#U#^&rH~H#5k(sV=NVT{F?{}VytYE89n2(x02jyBt(ma$44hTR1
z0uX=z1Rwwb2tWV=5P$##HdO%c|C_q9m=^>f009U<00Izz00bZa0SG`~DNvRozW*;l
zP=WviAOHafKmY;|fB*y_009VWpa8!AZ{VV0N)Uhm1Rwwb2tWV=5P$##AOL|i0et^o
c13?u65P$##AOHafKmY;|fB*y_uz>=90h)~y#{d8T

diff --git a/.coverage.DESKTOP-ATMEKSV.51388.XJOycQex b/.coverage.DESKTOP-ATMEKSV.51388.XJOycQex
deleted file mode 100644
index e1f7e6f75f5aa9a7a33884193b67b7cc22082200..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53248
zcmeI)O>f&a7zc34ZmrmHpcX+O1VIsAHpi&r&d8zIcG$8lhhhb~Zow`C=F&2qXtg9q
zlJjyH2DbwO4A?gq_MP?-b}M$-X{TNGJd$YHNnG?+)cXsuElVUt{`iozqqN^Vee5Ml
z48q8liP$sl7^Z1_DuiJeReG$^LvI%C+|qC8)tuR1v|BaWgTL17Z;g88SHu2(?TNix
z|84F4)j#Tf^_SJ(Em?Kx1U3jj00I#Be+x{W)-7jq(|qw`B6~xXMAB7J_PqA_*9ZHD
z2jX!5v&RP_J0^B&g0|gVu`i<VOpH_{2HsE!FX(%&OuXPoB*#jp9>=QB3mqM!rH&J}
z)BL>G_4<@6QAbq7DDr$6U5MxEqLn7dua>CuBs)NbDDNoX6k;dO-V~7<s7M8_inChm
zc>QLr`SPcl<*cupmnv<Oj9mIzoofvpD4OzUKn1yB)aTmwA{n^HDsBlmZJ`@dJ<pS@
zwNdClNJ266g0z`I>?K|p2z9R9aiaP+Hn7ELdLz8q)=*i`xIxddocXn8bc)P3<jj-j
znFmz4Ak?fyM!qy<PU{?u{T`A3F!o2>Ig}&2a1t)cxhQWE=8b*hzPX_<i}ucSrzYnf
z&2o(c?_{i|o!=5g&Q{utAZ!V41VzAms}<+Nb<?1zROj)@kUnp9<v0n`_g(7zZik-g
zKi^%kob7G%@+1uu=IQplB%bb6=YnQ>ctPm!`NeU=qhvlvcoYqXA{?YchI5Oob10f;
z$IJtbAtN4WBFqy~P2Xy?ab>9vqBjhC^RbpjOsr^t&|ILC4aR}XLnY=mxZ$aa<dJIU
zXR~SHZ3hyCPN!ON4z?GC&NKwNo#}3EvRSd5tu6CKH4US@xAGTjE`)SX-Y|{oQ@)b%
zIMNfz>n2H^qnIW27|G92b9SwYv%j^dIeEHnC*N6_Shp-^W5b+eBZB88e%zjG1Rp7e
zz?FsI&8t6>$+5UzcRoxNZ``gpA8#zGI9H+D;pfWB?@DQvFE>+tpC)~Nl;^6<M~W(c
zsV;U;<&b8Rk@O-OvNQ*0D}dmHeMyOBFB~Un{PNp0JbCRY>YEi%yw~=Ws+lII;r&$@
z#~v-KylllE>7$}?EMuO9;)dvj;ZVsSTN^0HqnNJg#Uic4^;PMUqKx>0%Q!l1q;=AZ
z3s3WhH0bpBq8;CtK|f!wcp1)0rMaI6CtoL<L@1+Z(4dTIh+ZjE7n+~t8B3Sn9+3%`
z>dSsM+__U`N<2GO*NV$lqc3El63<tHlcbeN1M5u2!i^N2q_wo!D8DWYxVO3;e&%n<
zEBsMzvTa(<{rl!*oDarqp6SN4sQXe^JAYf63z91*i+FiL6E1z)nT->+o8jQM*%f$l
z%X+YvDd<%BXPsJ;zu=qpzXttag8&2|009U<00Izz00bZa0SG`~`2@;l$+Y<Uf64yS
zu>YnfY!H9|1Rwwb2tWV=5P$##AOHafyqf~`Qu&^(|HR`1+bnN6`5ys1Z11)AYSgNd
z{mQUk+5fzo4MeRW009U<00Izz00bZa0SG_<0uayw_3}Ma{}!NBD{ocvUjn56!2ga;
zg;xkb00Izz00bZa0SG_<0uX=z1eQ^N@Bh<(`iBhy5P$##AOHafKmY;|fB*y_0D&bI
z!2SOv?p!nt0uX=z1Rwwb2tWV=5P$##Adm~-{y#zi0uX=z1Rwwb2tWV=5P$##Ah7%b
zxc|TWeT)`D00Izz00bZa0SG_<0uX=z1aSW!F#rJwKmY;|fB*y_009U<00IzLegWM7
zU;aKu3n2gj2tWV=5P$##AOHafKmY=`|Bo1e00bZa0SG_<0uX=z1Rwwb2rRz<?*A`;
aAESj3fB*y_009U<00Izz00bZaf&Ty)+a0$6

diff --git a/.idea/.gitignore b/.idea/.gitignore
deleted file mode 100644
index 13566b81b..000000000
--- a/.idea/.gitignore
+++ /dev/null
@@ -1,8 +0,0 @@
-# Default ignored files
-/shelf/
-/workspace.xml
-# Editor-based HTTP Client requests
-/httpRequests/
-# Datasource local storage ignored files
-/dataSources/
-/dataSources.local.xml
diff --git a/.idea/bayesvalidrox.iml b/.idea/bayesvalidrox.iml
deleted file mode 100644
index fab03b6ec..000000000
--- a/.idea/bayesvalidrox.iml
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="PYTHON_MODULE" version="4">
-  <component name="NewModuleRootManager">
-    <content url="file://$MODULE_DIR$">
-      <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
-      <sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
-    </content>
-    <orderEntry type="inheritedJdk" />
-    <orderEntry type="sourceFolder" forTests="false" />
-  </component>
-  <component name="PyDocumentationSettings">
-    <option name="format" value="NUMPY" />
-    <option name="myDocStringFormat" value="NumPy" />
-  </component>
-  <component name="TestRunnerService">
-    <option name="PROJECT_TEST_RUNNER" value="py.test" />
-  </component>
-</module>
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
deleted file mode 100644
index 105ce2da2..000000000
--- a/.idea/inspectionProfiles/profiles_settings.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<component name="InspectionProjectProfileManager">
-  <settings>
-    <option name="USE_PROJECT_PROFILE" value="false" />
-    <version value="1.0" />
-  </settings>
-</component>
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
deleted file mode 100644
index a6218fed0..000000000
--- a/.idea/misc.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="Black">
-    <option name="sdkName" value="Python 3.11" />
-  </component>
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11" project-jdk-type="Python SDK" />
-</project>
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
deleted file mode 100644
index 482c611f3..000000000
--- a/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="ProjectModuleManager">
-    <modules>
-      <module fileurl="file://$PROJECT_DIR$/.idea/bayesvalidrox.iml" filepath="$PROJECT_DIR$/.idea/bayesvalidrox.iml" />
-    </modules>
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/other.xml b/.idea/other.xml
deleted file mode 100644
index 2e75c2e2a..000000000
--- a/.idea/other.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="PySciProjectComponent">
-    <option name="PY_INTERACTIVE_PLOTS_SUGGESTED" value="true" />
-  </component>
-</project>
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 35eb1ddfb..000000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="VcsDirectoryMappings">
-    <mapping directory="" vcs="Git" />
-  </component>
-</project>
\ No newline at end of file
diff --git a/docs/diagrams/.$Structure_BayesInf.drawio.bkp b/docs/diagrams/.$Structure_BayesInf.drawio.bkp
deleted file mode 100644
index 3ba4ed263..000000000
--- a/docs/diagrams/.$Structure_BayesInf.drawio.bkp
+++ /dev/null
@@ -1,908 +0,0 @@
-<mxfile host="Electron" modified="2024-04-19T15:13:43.060Z" agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/22.1.11 Chrome/114.0.5735.289 Electron/25.9.8 Safari/537.36" etag="DE_l5njUGrsyMG_jufaX" version="22.1.11" type="device" pages="3">
-  <diagram name="Class and function structure" id="efOe0Jku58RX-i1bv-3b">
-    <mxGraphModel dx="2718" dy="686" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-22" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;MCMC&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1270" y="360" width="770" height="380" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-1" value="_kernel_rbf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-2" value="_logpdf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="820" y="140" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-10" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesInf&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-120" y="290" width="1310" height="680" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-24" value="if self.bootstrap &lt;br&gt;or self.bayes_loocv &lt;br&gt;or self.just_analysis" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;labelBackgroundColor=#ffae00;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-13">
-          <mxGeometry x="0.2902" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-42" value="if self.name != &#39;valid&#39;&lt;br&gt;and self.inference_method != &#39;rejection&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-31">
-          <mxGeometry x="0.5646" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-43" value="if self.inference_method == &#39;mcmc&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-32">
-          <mxGeometry x="-0.0958" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-19">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-52" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#C2C2C2;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-33">
-          <mxGeometry x="-0.112" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-47" value="if self.plot_post_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-34">
-          <mxGeometry x="0.2399" y="-1" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-46" value="if self.plot_map_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-35">
-          <mxGeometry x="0.4183" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-55" value="if self.bootstrap" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-54">
-          <mxGeometry x="0.1816" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-56">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-58" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-57">
-          <mxGeometry x="0.7182" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-60" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-61" value="if self.error_model&lt;br&gt;and self.name == &#39;calib&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-60">
-          <mxGeometry x="0.3024" y="2" relative="1" as="geometry">
-            <mxPoint x="67" y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-51">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-55" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-54">
-          <mxGeometry x="0.8253" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-9" value="create_inference" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="405" y="539" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-25" value="if len(self.perturbed_data) == 0" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-14">
-          <mxGeometry x="0.3402" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-27" value="if not self.emulator" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-16">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-44" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-29">
-          <mxGeometry x="0.4722" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-41" value="if self.emulator" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-30">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-62" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="340" y="680" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-63" value="if self.error_model&lt;br&gt;and self.name == &#39;valid&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-62">
-          <mxGeometry x="-0.3906" relative="1" as="geometry">
-            <mxPoint y="121" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-13" value="perform_bootstrap" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="50" y="335" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-14" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-75" y="460" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-15" value="_eval_model" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="660" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-49" value="if hasattr bias_inputs&amp;nbsp;&lt;br&gt;and not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-38">
-          <mxGeometry x="0.3126" y="-3" relative="1" as="geometry">
-            <mxPoint x="-103" y="31" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-39" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-16" value="normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="455" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-17" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-50" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-40">
-          <mxGeometry x="-0.6073" y="-5" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-17" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="385" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-18" value="_rejection_sampling" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="280" y="890" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-26" value="if not self.emulator&amp;nbsp;&lt;br&gt;and not self.inference_method == &#39;rejection&#39;&amp;nbsp;&lt;br&gt;and self.name == &#39;calib" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry x="-0.0559" y="15" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-48" value="if sigma2_prior is not None&lt;br&gt;and if hasattr bias_inputs&lt;br&gt;and if not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-37">
-          <mxGeometry x="-0.5544" y="-1" relative="1" as="geometry">
-            <mxPoint x="1" y="-5" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-19" value="_posterior_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="690" y="589" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-20" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-45" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-28">
-          <mxGeometry x="0.0517" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-20" value="_plot_max_a_posteriori" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="495" y="790" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-21" value="plot_post_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="630" y="720" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-36" value="Note: Arrows indicate function calls, beginning calls the end" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="10" y="10" width="190" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-51" value="Color meanings:&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;red: wrong, change&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;orange: seems off, look at again&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;light beige: has been removed" style="text;html=1;strokeColor=none;fillColor=none;align=left;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="20" y="70" width="220" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-53" value="plot_log_BME" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="150" y="820" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-56" value="plot_post_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="660" y="840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-59" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="45" y="740" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-1" value="_check_ranges" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-2" value="gelman_rubin" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="250" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-3" value="_iterative_scheme" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="2055" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-24" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-11">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-4" value="_my_ESS" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="100" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-8">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-10">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-53" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-52">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-56" value="if opts_sigma != &#39;B&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-53">
-          <mxGeometry x="0.7377" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-5" value="run_sampler" style="rounded=0;whiteSpace=wrap;html=1;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="534" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-6" target="HiMKSJFquRK0mIlwyRFI-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-6" value="log_prior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="510" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-15" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-7" target="HiMKSJFquRK0mIlwyRFI-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-15">
-          <mxGeometry x="0.0246" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-7" value="log_likelihood" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="539" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-17" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-12">
-          <mxGeometry x="0.4587" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-7">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-18" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-13">
-          <mxGeometry x="0.6826" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-8" value="log_posterior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1480" y="610" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-9" value="eval_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-10" value="train_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1450" y="420" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-11" target="HiMKSJFquRK0mIlwyRFI-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-11" value="marginal_llk_emcee" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1870" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-25" value="Never used!" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1880" y="680" width="100" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-26" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesModelComp&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1060" y="380" width="840" height="420" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HiMKSJFquRK0mIlwyRFI-31">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-27" value="model_comparison_all" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-860" y="566" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="-630" y="564" as="sourcePoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-47" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-42">
-          <mxGeometry x="-0.4883" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-29" target="HiMKSJFquRK0mIlwyRFI-30">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-49" value="if perturbed_data is None" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-37">
-          <mxGeometry x="-0.0507" y="4" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-29" value="generate_dataset" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-510" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-30" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-340" y="636" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-6" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HiMKSJFquRK0mIlwyRFI-33">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-31" value="cal_model_weight" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-835" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-32" value="plot_just_analysis" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-835" y="736" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-33" value="plot_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-980" y="416" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-34" value="plot_bayes_factor" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-410" y="431" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-51" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Discrepancy&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="360" y="1039.82" width="200" height="130" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-52" value="get_sample" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="1079.82" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-34">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-1" value="calc_bayes_factors" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-630" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-2" value="calc_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1030" y="566" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-4" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-16" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-23" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HC1H8j6nMwEtLoyIrXXk-16">
-          <mxGeometry x="-0.5478" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-18" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-3" value="calc_justifiability_analysis" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-860" y="666" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-17" value="setup" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-630" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="sQf09xvhinkT827TE7Va" name="Function structure Engine">
-    <mxGraphModel dx="1436" dy="968" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-1" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Engine&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="130" y="140" width="1390" height="690" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-2" value="hellinger_distance" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-3" value="logpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-4" value="subdomain" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="625" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-5" value="start_engine" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="250" y="680" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-6" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-6" value="train_normal" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="420" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="335" y="335" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-7" value="train_sequential" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-8" value="eval_metamodel" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="190" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-7" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-23">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-20" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-19">
-          <mxGeometry x="0.8137" relative="1" as="geometry">
-            <mxPoint x="-57" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-24">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-22" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-21">
-          <mxGeometry x="0.7684" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-25">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-24" value="if expdes.valid_model_runs" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-23">
-          <mxGeometry x="0.606" y="3" relative="1" as="geometry">
-            <mxPoint x="-16" y="3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-25" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-26">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-26" value="if mc_ref and pce" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-25">
-          <mxGeometry x="0.7094" y="-3" relative="1" as="geometry">
-            <mxPoint x="-31" y="-3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-9" value="train_seq_design" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="315" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-12" value="util_VarBasedDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="670" y="648" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-39" value="if method == &#39;bayesactdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-38">
-          <mxGeometry x="-0.6235" y="2" relative="1" as="geometry">
-            <mxPoint x="289" y="2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-15" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-12">
-          <mxGeometry x="0.7865" y="4" relative="1" as="geometry">
-            <mxPoint x="-91" y="185" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-13" value="util_BayesianActiveDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="680" width="150" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-13">
-          <mxGeometry x="0.197" y="-3" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-14" value="utilBayesianDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="880" y="730" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-43" value="if method == &#39;bayesoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-42">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint x="3" y="29" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-15" value="run_util_func" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="660" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-41" value="if method == &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-36">
-          <mxGeometry x="-0.5992" relative="1" as="geometry">
-            <mxPoint x="-197" y="62" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry">
-            <Array as="points">
-              <mxPoint x="965" y="590" />
-              <mxPoint x="1095" y="590" />
-            </Array>
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-16" value="dual_annealing" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="910" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-17" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-6" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br style=&quot;border-color: var(--border-color);&quot;&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-5">
-          <mxGeometry x="0.1312" y="2" relative="1" as="geometry">
-            <mxPoint x="17" y="-2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-17" value="tradeoff_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="980" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-4">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-1" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-15">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="790" y="280.0000000000002" as="sourcePoint" />
-            <mxPoint x="690" y="499.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-2" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-1">
-          <mxGeometry x="0.1579" relative="1" as="geometry">
-            <mxPoint x="-15" y="49" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-3" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-16">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="680" y="205.05882352941194" as="sourcePoint" />
-            <mxPoint x="805" y="779.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-4" value="if explore_method == &#39;dual annealing&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-3">
-          <mxGeometry x="-0.6061" relative="1" as="geometry">
-            <mxPoint x="270" y="46" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-10" value="if exploit_method == &#39;alphabetic&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-9">
-          <mxGeometry x="0.8144" y="1" relative="1" as="geometry">
-            <mxPoint x="74" y="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-18" value="choose_next_sample" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="610" y="210" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-20" value="util_AlphOptDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="330" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-21" value="_normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="430" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-22" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-22" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1130" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-23" value="_posteriorPlot" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="440" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-14" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-11">
-          <mxGeometry x="0.0929" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-17" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-22">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-18" value="commented out?" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-17">
-          <mxGeometry x="-0.1477" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-24" value="_BME_Calculator" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-25" value="_validError" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="510" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-26" value="_error_Mean_Std" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="580" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="ME5gyYpVqUByTnAIOcMV" name="Parameter and function interaction">
-    <mxGraphModel dx="2049" dy="1366" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-61" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-1" value="engine" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-3" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-71" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-4" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-4" value="emulator" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-65" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-5" value="name" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-47" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-6" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-6" value="bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-7" value="req_outputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-79" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-8" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-8" value="selected_indices" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-55" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-67" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-9" value="prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-68" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-11" value="n_prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-80" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-12" value="measured_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="880" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-58" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-13" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-13" value="inference_method" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="960" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-14" value="mcmc_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1040" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-63" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-15" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-15" value="perturbed_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1120" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-45" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-77" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-16" value="bayes_loocv" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-64" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-17" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-17" value="n_bootstrap_itrs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-18" value="bootstrap_noise" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1360" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-46" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-78" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-19" value="just_analysis" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1440" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-20" value="valid_metrics" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1520" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-52" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-21" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-21" value="plot_post_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1600" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-51" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-22" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-22" value="plot_map_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1680" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-23" value="max_a_posteriori" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1760" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-24" value="corner_title_fmt" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-25" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-25" value="out_dir" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1920" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-50" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-66" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-26" value="error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2000" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-56" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-72" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-27" value="bias_inputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2080" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-41" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-28" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-28" value="measurement_error" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-81" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-29" value="sigma2s" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-30" value="log_likes" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-82" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-31" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-31" value="dtype" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-32" value="create_inference" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-39" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-39" value="n_tot_measurement" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-43" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-42" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-42" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-49" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-59" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-48" value="posterior_df" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-53" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="560" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-60" value="perform_bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="720" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-75" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-69" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-69" value="__mean_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-76" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-70" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-70" value="_std_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-74" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-73" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-73" value="__model_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2880" width="120" height="60" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-</mxfile>
diff --git a/docs/diagrams/.$Structure_BayesInf.drawio.dtmp b/docs/diagrams/.$Structure_BayesInf.drawio.dtmp
deleted file mode 100644
index 14663ecb0..000000000
--- a/docs/diagrams/.$Structure_BayesInf.drawio.dtmp
+++ /dev/null
@@ -1,964 +0,0 @@
-<mxfile host="Electron" modified="2024-04-19T16:08:46.718Z" agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/22.1.11 Chrome/114.0.5735.289 Electron/25.9.8 Safari/537.36" etag="QkifmTMxwBp7UqUSeBiS" version="22.1.11" type="device" pages="4">
-  <diagram name="Class and function structure" id="efOe0Jku58RX-i1bv-3b">
-    <mxGraphModel dx="3735" dy="1372" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-22" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;MCMC&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1270" y="360" width="770" height="380" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-1" value="_kernel_rbf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-2" value="_logpdf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="820" y="140" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-10" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesInf&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-120" y="290" width="1310" height="680" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-24" value="if self.bootstrap &lt;br&gt;or self.bayes_loocv &lt;br&gt;or self.just_analysis" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;labelBackgroundColor=#ffae00;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-13">
-          <mxGeometry x="0.2902" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-42" value="if self.name != &#39;valid&#39;&lt;br&gt;and self.inference_method != &#39;rejection&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-31">
-          <mxGeometry x="0.5646" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-43" value="if self.inference_method == &#39;mcmc&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-32">
-          <mxGeometry x="-0.0958" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-19">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-52" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#C2C2C2;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-33">
-          <mxGeometry x="-0.112" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-47" value="if self.plot_post_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-34">
-          <mxGeometry x="0.2399" y="-1" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-46" value="if self.plot_map_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-35">
-          <mxGeometry x="0.4183" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-55" value="if self.bootstrap" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-54">
-          <mxGeometry x="0.1816" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-56">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-58" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-57">
-          <mxGeometry x="0.7182" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-60" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-61" value="if self.error_model&lt;br&gt;and self.name == &#39;calib&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-60">
-          <mxGeometry x="0.3024" y="2" relative="1" as="geometry">
-            <mxPoint x="67" y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-51">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-55" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-54">
-          <mxGeometry x="0.8253" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-9" value="create_inference" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="405" y="539" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-25" value="if len(self.perturbed_data) == 0" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-14">
-          <mxGeometry x="0.3402" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-27" value="if not self.emulator" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-16">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-44" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-29">
-          <mxGeometry x="0.4722" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-41" value="if self.emulator" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-30">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-62" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="340" y="680" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-63" value="if self.error_model&lt;br&gt;and self.name == &#39;valid&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-62">
-          <mxGeometry x="-0.3906" relative="1" as="geometry">
-            <mxPoint y="121" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-13" value="perform_bootstrap" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="50" y="335" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-14" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-75" y="460" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-15" value="_eval_model" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="660" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-49" value="if hasattr bias_inputs&amp;nbsp;&lt;br&gt;and not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-38">
-          <mxGeometry x="0.3126" y="-3" relative="1" as="geometry">
-            <mxPoint x="-103" y="31" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-39" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-16" value="normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="455" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-17" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-50" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-40">
-          <mxGeometry x="-0.6073" y="-5" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-17" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="385" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-18" value="_rejection_sampling" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="280" y="890" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-26" value="if not self.emulator&amp;nbsp;&lt;br&gt;and not self.inference_method == &#39;rejection&#39;&amp;nbsp;&lt;br&gt;and self.name == &#39;calib" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry x="-0.0559" y="15" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-48" value="if sigma2_prior is not None&lt;br&gt;and if hasattr bias_inputs&lt;br&gt;and if not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-37">
-          <mxGeometry x="-0.5544" y="-1" relative="1" as="geometry">
-            <mxPoint x="1" y="-5" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-19" value="_posterior_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="690" y="589" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-20" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-45" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-28">
-          <mxGeometry x="0.0517" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-20" value="_plot_max_a_posteriori" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="495" y="790" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-21" value="plot_post_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="630" y="720" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-36" value="Note: Arrows indicate function calls, beginning calls the end" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="10" y="10" width="190" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-51" value="Color meanings:&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;red: wrong, change&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;orange: seems off, look at again&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;light beige: has been removed" style="text;html=1;strokeColor=none;fillColor=none;align=left;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="20" y="70" width="220" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-53" value="plot_log_BME" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="150" y="820" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-56" value="plot_post_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="660" y="840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-59" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="45" y="740" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-1" value="_check_ranges" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-2" value="gelman_rubin" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="250" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-3" value="_iterative_scheme" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="2055" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-24" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-11">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-4" value="_my_ESS" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="100" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-8">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-10">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-53" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-52">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-56" value="if opts_sigma != &#39;B&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-53">
-          <mxGeometry x="0.7377" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-5" value="run_sampler" style="rounded=0;whiteSpace=wrap;html=1;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="534" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-6" target="HiMKSJFquRK0mIlwyRFI-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-6" value="log_prior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="510" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-15" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-7" target="HiMKSJFquRK0mIlwyRFI-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-15">
-          <mxGeometry x="0.0246" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-7" value="log_likelihood" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="539" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-17" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-12">
-          <mxGeometry x="0.4587" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-7">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-18" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-13">
-          <mxGeometry x="0.6826" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-8" value="log_posterior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1480" y="610" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-9" value="eval_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-10" value="train_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1450" y="420" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-11" target="HiMKSJFquRK0mIlwyRFI-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-11" value="marginal_llk_emcee" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1870" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-25" value="Never used!" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1880" y="680" width="100" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-26" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesModelComp&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1096" y="380" width="840" height="420" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HiMKSJFquRK0mIlwyRFI-31">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-27" value="model_comparison_all" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-896" y="566" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="-630" y="564" as="sourcePoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-47" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-42">
-          <mxGeometry x="-0.4883" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-29" target="HiMKSJFquRK0mIlwyRFI-30">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-49" value="if perturbed_data is None" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-37">
-          <mxGeometry x="-0.0507" y="4" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-29" value="generate_dataset" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-546" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-30" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-376" y="636" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-6" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HiMKSJFquRK0mIlwyRFI-33">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-31" value="cal_model_weight" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-871" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-32" value="plot_just_analysis" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-871" y="736" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-33" value="plot_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1016" y="416" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-34" value="plot_bayes_factor" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-446" y="431" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-51" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Discrepancy&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="360" y="1039.82" width="200" height="130" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-52" value="get_sample" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="1079.82" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-34">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-1" value="calc_bayes_factors" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-666" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-2" value="calc_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1066" y="566" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-4" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-16" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-23" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HC1H8j6nMwEtLoyIrXXk-16">
-          <mxGeometry x="-0.5478" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-18" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-3" value="calc_justifiability_analysis" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-896" y="666" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-17" value="setup" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-666" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="sQf09xvhinkT827TE7Va" name="Function structure Engine">
-    <mxGraphModel dx="1436" dy="968" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-1" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Engine&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="130" y="140" width="1390" height="690" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-2" value="hellinger_distance" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-3" value="logpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-4" value="subdomain" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="625" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-5" value="start_engine" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="250" y="680" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-6" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-6" value="train_normal" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="420" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="335" y="335" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-7" value="train_sequential" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-8" value="eval_metamodel" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="190" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-7" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-23">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-20" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-19">
-          <mxGeometry x="0.8137" relative="1" as="geometry">
-            <mxPoint x="-57" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-24">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-22" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-21">
-          <mxGeometry x="0.7684" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-25">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-24" value="if expdes.valid_model_runs" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-23">
-          <mxGeometry x="0.606" y="3" relative="1" as="geometry">
-            <mxPoint x="-16" y="3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-25" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-26">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-26" value="if mc_ref and pce" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-25">
-          <mxGeometry x="0.7094" y="-3" relative="1" as="geometry">
-            <mxPoint x="-31" y="-3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-9" value="train_seq_design" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="315" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-12" value="util_VarBasedDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="670" y="648" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-39" value="if method == &#39;bayesactdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-38">
-          <mxGeometry x="-0.6235" y="2" relative="1" as="geometry">
-            <mxPoint x="289" y="2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-15" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-12">
-          <mxGeometry x="0.7865" y="4" relative="1" as="geometry">
-            <mxPoint x="-91" y="185" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-13" value="util_BayesianActiveDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="680" width="150" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-13">
-          <mxGeometry x="0.197" y="-3" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-14" value="utilBayesianDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="880" y="730" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-43" value="if method == &#39;bayesoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-42">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint x="3" y="29" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-15" value="run_util_func" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="660" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-41" value="if method == &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-36">
-          <mxGeometry x="-0.5992" relative="1" as="geometry">
-            <mxPoint x="-197" y="62" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry">
-            <Array as="points">
-              <mxPoint x="965" y="590" />
-              <mxPoint x="1095" y="590" />
-            </Array>
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-16" value="dual_annealing" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="910" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-17" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-6" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br style=&quot;border-color: var(--border-color);&quot;&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-5">
-          <mxGeometry x="0.1312" y="2" relative="1" as="geometry">
-            <mxPoint x="17" y="-2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-17" value="tradeoff_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="980" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-4">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-1" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-15">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="790" y="280.0000000000002" as="sourcePoint" />
-            <mxPoint x="690" y="499.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-2" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-1">
-          <mxGeometry x="0.1579" relative="1" as="geometry">
-            <mxPoint x="-15" y="49" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-3" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-16">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="680" y="205.05882352941194" as="sourcePoint" />
-            <mxPoint x="805" y="779.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-4" value="if explore_method == &#39;dual annealing&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-3">
-          <mxGeometry x="-0.6061" relative="1" as="geometry">
-            <mxPoint x="270" y="46" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-10" value="if exploit_method == &#39;alphabetic&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-9">
-          <mxGeometry x="0.8144" y="1" relative="1" as="geometry">
-            <mxPoint x="74" y="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-18" value="choose_next_sample" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="610" y="210" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-20" value="util_AlphOptDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="330" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-21" value="_normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="430" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-22" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-22" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1130" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-23" value="_posteriorPlot" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="440" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-14" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-11">
-          <mxGeometry x="0.0929" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-17" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-22">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-18" value="commented out?" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-17">
-          <mxGeometry x="-0.1477" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-24" value="_BME_Calculator" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-25" value="_validError" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="510" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-26" value="_error_Mean_Std" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="580" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="ME5gyYpVqUByTnAIOcMV" name="Parameter and function interaction">
-    <mxGraphModel dx="2049" dy="1366" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-61" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-1" value="engine" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-3" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-71" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-4" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-4" value="emulator" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-65" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-5" value="name" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-47" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-6" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-6" value="bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-7" value="req_outputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-79" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-8" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-8" value="selected_indices" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-55" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-67" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-9" value="prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-68" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-11" value="n_prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-80" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-12" value="measured_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="880" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-58" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-13" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-13" value="inference_method" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="960" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-14" value="mcmc_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1040" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-63" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-15" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-15" value="perturbed_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1120" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-45" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-77" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-16" value="bayes_loocv" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-64" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-17" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-17" value="n_bootstrap_itrs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-18" value="bootstrap_noise" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1360" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-46" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-78" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-19" value="just_analysis" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1440" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-20" value="valid_metrics" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1520" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-52" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-21" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-21" value="plot_post_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1600" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-51" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-22" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-22" value="plot_map_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1680" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-23" value="max_a_posteriori" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1760" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-24" value="corner_title_fmt" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-25" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-25" value="out_dir" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1920" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-50" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-66" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-26" value="error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2000" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-56" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-72" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-27" value="bias_inputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2080" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-41" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-28" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-28" value="measurement_error" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-81" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-29" value="sigma2s" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-30" value="log_likes" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-82" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-31" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-31" value="dtype" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-32" value="create_inference" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-39" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-39" value="n_tot_measurement" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-43" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-42" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-42" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-49" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-59" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-48" value="posterior_df" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-53" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="560" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-60" value="perform_bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="720" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-75" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-69" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-69" value="__mean_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-76" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-70" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-70" value="_std_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-74" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-73" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-73" value="__model_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2880" width="120" height="60" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="QgiNX2WXFOBDsDgzoFY9" name="Folder structure">
-    <mxGraphModel dx="1436" dy="968" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="KLYezTmecfuvBG8KQe-n-1" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="140" y="80" width="750" height="550" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-2" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="110" width="700" height="220" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-3" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="370" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-4" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="440" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-5" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="500" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-6" value="adaptPlot" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="190" y="150" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-7" value="apoly_construction" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="280" y="150" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-8" value="bayes_linear" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="440" y="150" width="90" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-9" value="engine" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="550" y="150" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-11" value="eval_rec_rule" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="640" y="150" width="100" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-12" value="exp_designs" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="760" y="150" width="90" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-13" value="exploration" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="190" y="210" width="80" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-14" value="glexindex" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="290" y="210" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-15" value="input_space" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="380" y="210" width="80" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-16" value="inputs" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="480" y="210" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-17" value="meta_model_engine" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="570" y="210" width="160" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-</mxfile>
diff --git a/docs/diagrams/Structure_BayesInf.drawio b/docs/diagrams/Structure_BayesInf.drawio
deleted file mode 100644
index 2e28cbcb3..000000000
--- a/docs/diagrams/Structure_BayesInf.drawio
+++ /dev/null
@@ -1,964 +0,0 @@
-<mxfile host="Electron" modified="2024-04-19T16:08:43.773Z" agent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/22.1.11 Chrome/114.0.5735.289 Electron/25.9.8 Safari/537.36" etag="2ELAo-FvqOEnLxZhqqO4" version="22.1.11" type="device" pages="4">
-  <diagram name="Class and function structure" id="efOe0Jku58RX-i1bv-3b">
-    <mxGraphModel dx="3735" dy="1372" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-22" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;MCMC&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1270" y="360" width="770" height="380" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-1" value="_kernel_rbf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-2" value="_logpdf" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="820" y="140" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-10" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesInf&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-120" y="290" width="1310" height="680" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-24" value="if self.bootstrap &lt;br&gt;or self.bayes_loocv &lt;br&gt;or self.just_analysis" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;labelBackgroundColor=#ffae00;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-13">
-          <mxGeometry x="0.2902" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-42" value="if self.name != &#39;valid&#39;&lt;br&gt;and self.inference_method != &#39;rejection&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-31">
-          <mxGeometry x="0.5646" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-43" value="if self.inference_method == &#39;mcmc&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-32">
-          <mxGeometry x="-0.0958" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-19">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-52" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#C2C2C2;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-33">
-          <mxGeometry x="-0.112" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-47" value="if self.plot_post_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-34">
-          <mxGeometry x="0.2399" y="-1" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-46" value="if self.plot_map_pred" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-35">
-          <mxGeometry x="0.4183" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-55" value="if self.bootstrap" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-54">
-          <mxGeometry x="0.1816" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-56">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-58" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-57">
-          <mxGeometry x="0.7182" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-60" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-61" value="if self.error_model&lt;br&gt;and self.name == &#39;calib&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-60">
-          <mxGeometry x="0.3024" y="2" relative="1" as="geometry">
-            <mxPoint x="67" y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-9" target="HiMKSJFquRK0mIlwyRFI-51">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-55" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-54">
-          <mxGeometry x="0.8253" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-9" value="create_inference" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="405" y="539" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-25" value="if len(self.perturbed_data) == 0" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-14">
-          <mxGeometry x="0.3402" relative="1" as="geometry">
-            <mxPoint y="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-27" value="if not self.emulator" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-16">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-44" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-29">
-          <mxGeometry x="0.4722" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-41" value="if self.emulator" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-30">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-62" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-13" target="xary-zVek9Bg-A1b1ZmA-59">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="340" y="680" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-63" value="if self.error_model&lt;br&gt;and self.name == &#39;valid&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-62">
-          <mxGeometry x="-0.3906" relative="1" as="geometry">
-            <mxPoint y="121" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-13" value="perform_bootstrap" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="50" y="335" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-14" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-75" y="460" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-15" value="_eval_model" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="660" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-49" value="if hasattr bias_inputs&amp;nbsp;&lt;br&gt;and not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-38">
-          <mxGeometry x="0.3126" y="-3" relative="1" as="geometry">
-            <mxPoint x="-103" y="31" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-39" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-16" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-16" value="normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="455" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-17" target="xary-zVek9Bg-A1b1ZmA-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-50" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-40">
-          <mxGeometry x="-0.6073" y="-5" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-17" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="650" y="385" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-18" value="_rejection_sampling" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="280" y="890" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-26" value="if not self.emulator&amp;nbsp;&lt;br&gt;and not self.inference_method == &#39;rejection&#39;&amp;nbsp;&lt;br&gt;and self.name == &#39;calib" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry x="-0.0559" y="15" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-19" target="xary-zVek9Bg-A1b1ZmA-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-48" value="if sigma2_prior is not None&lt;br&gt;and if hasattr bias_inputs&lt;br&gt;and if not hasattr error_model" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#ffae00;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-37">
-          <mxGeometry x="-0.5544" y="-1" relative="1" as="geometry">
-            <mxPoint x="1" y="-5" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-19" value="_posterior_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="690" y="589" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="xary-zVek9Bg-A1b1ZmA-20" target="xary-zVek9Bg-A1b1ZmA-15">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-45" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#cdcbcb;" vertex="1" connectable="0" parent="xary-zVek9Bg-A1b1ZmA-28">
-          <mxGeometry x="0.0517" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-20" value="_plot_max_a_posteriori" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="495" y="790" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-21" value="plot_post_predictive" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="630" y="720" width="120" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-36" value="Note: Arrows indicate function calls, beginning calls the end" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="10" y="10" width="190" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-51" value="Color meanings:&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;red: wrong, change&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;orange: seems off, look at again&lt;br&gt;&lt;span style=&quot;white-space: pre;&quot;&gt;&#x9;&lt;/span&gt;light beige: has been removed" style="text;html=1;strokeColor=none;fillColor=none;align=left;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
-          <mxGeometry x="20" y="70" width="220" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-53" value="plot_log_BME" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="150" y="820" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-56" value="plot_post_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="660" y="840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="xary-zVek9Bg-A1b1ZmA-59" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="45" y="740" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-1" value="_check_ranges" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-2" value="gelman_rubin" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="250" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-3" value="_iterative_scheme" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="2055" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-24" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-4" target="HiMKSJFquRK0mIlwyRFI-11">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-4" value="_my_ESS" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="100" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-8">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-10">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-53" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-5" target="HiMKSJFquRK0mIlwyRFI-52">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-56" value="if opts_sigma != &#39;B&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=#FF9A03;" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-53">
-          <mxGeometry x="0.7377" y="1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-5" value="run_sampler" style="rounded=0;whiteSpace=wrap;html=1;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="1350" y="534" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-6" target="HiMKSJFquRK0mIlwyRFI-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-6" value="log_prior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1595" y="510" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-15" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-7" target="HiMKSJFquRK0mIlwyRFI-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-15">
-          <mxGeometry x="0.0246" y="2" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-7" value="log_likelihood" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="539" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-17" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-12">
-          <mxGeometry x="0.4587" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-8" target="HiMKSJFquRK0mIlwyRFI-7">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-18" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-13">
-          <mxGeometry x="0.6826" y="4" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-8" value="log_posterior" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1480" y="610" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-9" value="eval_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1760" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-10" value="train_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="1450" y="420" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-11" target="HiMKSJFquRK0mIlwyRFI-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-11" value="marginal_llk_emcee" style="rounded=0;whiteSpace=wrap;html=1;fillColor=#f9f7ed;strokeColor=#CCC1AA;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1870" y="620" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-25" value="Never used!" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;fontColor=#CCC1AA;" vertex="1" parent="1">
-          <mxGeometry x="1880" y="680" width="100" height="30" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-26" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;BayesModelComp&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1096" y="380" width="840" height="420" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-14" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-27" target="HiMKSJFquRK0mIlwyRFI-31">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-27" value="model_comparison_all" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-896" y="566" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="-630" y="564" as="sourcePoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-47" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-42">
-          <mxGeometry x="-0.4883" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-29" target="HiMKSJFquRK0mIlwyRFI-30">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-49" value="if perturbed_data is None" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HiMKSJFquRK0mIlwyRFI-37">
-          <mxGeometry x="-0.0507" y="4" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-29" value="generate_dataset" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-546" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-30" value="_perturb_data" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-376" y="636" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-6" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-1">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HiMKSJFquRK0mIlwyRFI-33">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HiMKSJFquRK0mIlwyRFI-31" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-31" value="cal_model_weight" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-871" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-32" value="plot_just_analysis" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-871" y="736" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-33" value="plot_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1016" y="416" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-34" value="plot_bayes_factor" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-446" y="431" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-51" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Discrepancy&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="360" y="1039.82" width="200" height="130" as="geometry" />
-        </mxCell>
-        <mxCell id="HiMKSJFquRK0mIlwyRFI-52" value="get_sample" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="1079.82" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-34">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-20" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-1" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-1" value="calc_bayes_factors" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-666" y="466" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-2" value="calc_model_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-1066" y="566" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-4" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-16" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="xary-zVek9Bg-A1b1ZmA-9">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-23" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="HC1H8j6nMwEtLoyIrXXk-16">
-          <mxGeometry x="-0.5478" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-18" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HC1H8j6nMwEtLoyIrXXk-17">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-22" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="HC1H8j6nMwEtLoyIrXXk-3" target="HiMKSJFquRK0mIlwyRFI-29">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-3" value="calc_justifiability_analysis" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="-896" y="666" width="160" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="HC1H8j6nMwEtLoyIrXXk-17" value="setup" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="-666" y="566" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="sQf09xvhinkT827TE7Va" name="Function structure Engine">
-    <mxGraphModel dx="1436" dy="968" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-1" value="&lt;p style=&quot;margin:0px;margin-top:4px;text-align:center;&quot;&gt;&lt;b&gt;Engine&lt;/b&gt;&lt;/p&gt;&lt;hr size=&quot;1&quot;&gt;&lt;div style=&quot;height:2px;&quot;&gt;&lt;/div&gt;" style="verticalAlign=top;align=left;overflow=fill;fontSize=12;fontFamily=Helvetica;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="130" y="140" width="1390" height="690" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-2" value="hellinger_distance" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-3" value="logpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1050" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-4" value="subdomain" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="625" y="50" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-5" value="start_engine" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="250" y="680" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-32" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-6" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-6" value="train_normal" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="420" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-10" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-9">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="335" y="335" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-7" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-7" value="train_sequential" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="170" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-8" value="eval_metamodel" style="html=1;whiteSpace=wrap;strokeWidth=2;" vertex="1" parent="1">
-          <mxGeometry x="190" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-7" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-19" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-23">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-20" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-19">
-          <mxGeometry x="0.8137" relative="1" as="geometry">
-            <mxPoint x="-57" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-21" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-24">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-22" value="if len(obs_data) != 0" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-21">
-          <mxGeometry x="0.7684" y="3" relative="1" as="geometry">
-            <mxPoint x="1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-23" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-25">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-24" value="if expdes.valid_model_runs" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-23">
-          <mxGeometry x="0.606" y="3" relative="1" as="geometry">
-            <mxPoint x="-16" y="3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-25" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-9" target="JXjM7l_erEiZMkSmYBvl-26">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-26" value="if mc_ref and pce" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-25">
-          <mxGeometry x="0.7094" y="-3" relative="1" as="geometry">
-            <mxPoint x="-31" y="-3" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-9" value="train_seq_design" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="315" y="310" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-12" value="util_VarBasedDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="670" y="648" width="130" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-28" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-31" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-5">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-39" value="if method == &#39;bayesactdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];labelBackgroundColor=default;" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-38">
-          <mxGeometry x="-0.6235" y="2" relative="1" as="geometry">
-            <mxPoint x="289" y="2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-12" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-13" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-15" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-12">
-          <mxGeometry x="0.7865" y="4" relative="1" as="geometry">
-            <mxPoint x="-91" y="185" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-13" value="util_BayesianActiveDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1020" y="680" width="150" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-6">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-13" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-14" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-16" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-13">
-          <mxGeometry x="0.197" y="-3" relative="1" as="geometry">
-            <mxPoint x="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-14" value="utilBayesianDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="880" y="730" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-42" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-15" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-43" value="if method == &#39;bayesoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-42">
-          <mxGeometry x="0.6143" y="-3" relative="1" as="geometry">
-            <mxPoint x="3" y="29" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-15" value="run_util_func" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="660" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-12">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-41" value="if method == &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="JXjM7l_erEiZMkSmYBvl-36">
-          <mxGeometry x="-0.5992" relative="1" as="geometry">
-            <mxPoint x="-197" y="62" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-13">
-          <mxGeometry relative="1" as="geometry">
-            <Array as="points">
-              <mxPoint x="965" y="590" />
-              <mxPoint x="1095" y="590" />
-            </Array>
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-16" target="JXjM7l_erEiZMkSmYBvl-14">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-16" value="dual_annealing" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="910" y="450" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-17" target="JXjM7l_erEiZMkSmYBvl-18">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-6" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br style=&quot;border-color: var(--border-color);&quot;&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-5">
-          <mxGeometry x="0.1312" y="2" relative="1" as="geometry">
-            <mxPoint x="17" y="-2" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-17" value="tradeoff_weights" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="980" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-30" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-4">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-1" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-15">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="790" y="280.0000000000002" as="sourcePoint" />
-            <mxPoint x="690" y="499.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-2" value="if exploit _method is &#39;bayesoptdesign&#39;,&lt;br&gt;&#39;bayesactdesign&#39; or &#39;varoptdesign&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-1">
-          <mxGeometry x="0.1579" relative="1" as="geometry">
-            <mxPoint x="-15" y="49" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-3" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.75;exitY=1;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-16">
-          <mxGeometry relative="1" as="geometry">
-            <mxPoint x="680" y="205.05882352941194" as="sourcePoint" />
-            <mxPoint x="805" y="779.9999999999998" as="targetPoint" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-4" value="if explore_method == &#39;dual annealing&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-3">
-          <mxGeometry x="-0.6061" relative="1" as="geometry">
-            <mxPoint x="270" y="46" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-9" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-18" target="JXjM7l_erEiZMkSmYBvl-20">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-10" value="if exploit_method == &#39;alphabetic&#39;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-9">
-          <mxGeometry x="0.8144" y="1" relative="1" as="geometry">
-            <mxPoint x="74" y="-1" as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-18" value="choose_next_sample" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="610" y="210" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-20" value="util_AlphOptDesign" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="330" y="210" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-21" value="_normpdf" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="430" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-29" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-22" target="JXjM7l_erEiZMkSmYBvl-3">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-22" value="_corr_factor_BME" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1130" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-23" value="_posteriorPlot" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="440" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-27" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-2">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-11" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-21">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-14" value="always" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-11">
-          <mxGeometry x="0.0929" y="-1" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-17" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="JXjM7l_erEiZMkSmYBvl-24" target="JXjM7l_erEiZMkSmYBvl-22">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="W5_FOelZ0qj-h3Gb0n3K-18" value="commented out?" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="W5_FOelZ0qj-h3Gb0n3K-17">
-          <mxGeometry x="-0.1477" y="3" relative="1" as="geometry">
-            <mxPoint as="offset" />
-          </mxGeometry>
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-24" value="_BME_Calculator" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="1340" y="220" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-25" value="_validError" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="510" width="110" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="JXjM7l_erEiZMkSmYBvl-26" value="_error_Mean_Std" style="html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="520" y="580" width="110" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="ME5gyYpVqUByTnAIOcMV" name="Parameter and function interaction">
-    <mxGraphModel dx="2049" dy="1366" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-33" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-54" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-61" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-1" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-1" value="engine" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-3" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-71" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-4" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-4" value="emulator" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-37" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-57" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-65" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-5" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-5" value="name" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-47" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-6" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-6" value="bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-7" value="req_outputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-79" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-8" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-8" value="selected_indices" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-35" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-55" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-67" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-9" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-9" value="prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-36" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-68" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-11" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-11" value="n_prior_samples" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-38" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-80" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-12" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-12" value="measured_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="880" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-58" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-13" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-13" value="inference_method" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="960" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-14" value="mcmc_params" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1040" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-63" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-15" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-15" value="perturbed_data" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1120" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-45" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-77" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-16" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-16" value="bayes_loocv" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1200" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-64" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-17" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-17" value="n_bootstrap_itrs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1280" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-18" value="bootstrap_noise" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1360" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-46" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-78" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-19" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-19" value="just_analysis" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1440" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-20" value="valid_metrics" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1520" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-52" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-21" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-21" value="plot_post_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1600" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-51" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-22" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-22" value="plot_map_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1680" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-23" value="max_a_posteriori" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1760" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-24" value="corner_title_fmt" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1840" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-34" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-25" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-25" value="out_dir" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="1920" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-50" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-66" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-26" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-26" value="error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2000" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-56" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-72" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-27" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-27" value="bias_inputs" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2080" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-41" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-28" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-28" value="measurement_error" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2160" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-44" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-81" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-29" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-29" value="sigma2s" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2240" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-30" value="log_likes" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2320" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-82" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-31" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-31" value="dtype" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2400" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-32" value="create_inference" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="400" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-40" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-39" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-39" value="n_tot_measurement" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2480" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-43" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-42" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-42" value="Discrepancy" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2560" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-49" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-32">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-59" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-48" target="K5oJ7VEt7dPmeK6pba1f-53">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-48" value="posterior_df" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2640" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-53" value="create_error_model" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="560" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-60" value="perform_bootstrap" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="720" y="20" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-75" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-69" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-69" value="__mean_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2720" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-76" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-70" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-70" value="_std_pce_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2800" width="120" height="60" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-74" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;" edge="1" parent="1" source="K5oJ7VEt7dPmeK6pba1f-73" target="K5oJ7VEt7dPmeK6pba1f-60">
-          <mxGeometry relative="1" as="geometry" />
-        </mxCell>
-        <mxCell id="K5oJ7VEt7dPmeK6pba1f-73" value="__model_prior_pred" style="rounded=0;whiteSpace=wrap;html=1;" vertex="1" parent="1">
-          <mxGeometry x="40" y="2880" width="120" height="60" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-  <diagram id="QgiNX2WXFOBDsDgzoFY9" name="Folder structure">
-    <mxGraphModel dx="1436" dy="968" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="850" pageHeight="1100" math="0" shadow="0">
-      <root>
-        <mxCell id="0" />
-        <mxCell id="1" parent="0" />
-        <mxCell id="KLYezTmecfuvBG8KQe-n-1" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="140" y="80" width="750" height="550" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-2" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="110" width="700" height="220" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-3" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="370" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-4" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="440" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-5" value="" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="170" y="500" width="180" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-6" value="adaptPlot" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="190" y="150" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-7" value="apoly_construction" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="280" y="150" width="140" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-8" value="bayes_linear" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="440" y="150" width="90" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-9" value="engine" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="550" y="150" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-11" value="eval_rec_rule" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="640" y="150" width="100" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-12" value="exp_designs" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="760" y="150" width="90" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-13" value="exploration" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="190" y="210" width="80" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-14" value="glexindex" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="290" y="210" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-15" value="input_space" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="380" y="210" width="80" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-16" value="inputs" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="480" y="210" width="70" height="50" as="geometry" />
-        </mxCell>
-        <mxCell id="KLYezTmecfuvBG8KQe-n-17" value="meta_model_engine" style="shape=folder;fontStyle=1;spacingTop=10;tabWidth=40;tabHeight=14;tabPosition=left;html=1;whiteSpace=wrap;" vertex="1" parent="1">
-          <mxGeometry x="570" y="210" width="160" height="50" as="geometry" />
-        </mxCell>
-      </root>
-    </mxGraphModel>
-  </diagram>
-</mxfile>
diff --git a/examples/.coverage b/examples/.coverage
deleted file mode 100644
index 254e10e4371d703eefec0f0437f9c0575be3f5ec..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 53248
zcmeI)U2oe|7zc1W>5?T)<e_Pbs%q-Ffv#HHQSAaTE}(1`Ol%s1!ObGF6X&Eci|x$E
z-r5U<vLQ`E;tLGKXW*;w5xASU;R<QecER)b*iF(l({AuI_*;2#?DP6NZ+Vlyym`%w
zmFS0&FJp1hI%V0m^^p*kW!323q<g+qX=fpSLC^No{=D6qwb}o(>3nT9DnD7yH_aQ)
zM&p;}JBz<J{MwI;zgA_<r4!g7009U<;QuW!zS*d@mX_>?-^H>!RB<F-6`9B8<u7)&
zZ|#U%+aF)s5$2dUR~NKxY=~_Ug%89?MWXKwmGFX|=gQa%c11i;dFVu|9xrrsjFvhM
zxS7W1y^hzTSh3orBu0_v%V=NRRr~8{fGoFI?ZxH*C8E6DfJ2CLS@bm#slJL-;3{oO
zan9?l)z=<<U$3@KpSB;Xv`#W|>9#mi8#*wl%GEw4<c3j?OW%!T;0}~t7xJKnZb;?a
zi%o5#(7zCeV(0~FH3RL%UKj|qr`#k~y<;o5&S?2Yc+<6^wC-?)?wFWaUQ;@g$j8LY
zf@hKYRJb6_Stk)WYl@tfIY|62k$;=`BW@guVKR=xc`@h3jl-<6uiQ4rwB@9}GtHTk
zbBmf-N#NZ})IsB~i%HD&v>HLUF1QlZ1KwV&wBA2$ThuGnp1wDv_gfvA#9{inLyh0r
zqKC#0Z=I~RR#xoC_tK8SJe{r=>w}%zOs6?GJg4jM`T6~ZN6Bm_;ZZakim;y!8ICPX
z<4|v&9y1qm3>oo26JZvRO8Q!R8yA+!AiBe_JKNXN9-|cv5Sk0}VErU;d8lZvgB#vg
zk=#|A*;z9!yzY)fU1zISY3;1c>pBNrptE(bTOTh~s;%W^`(Z8ZMp<iRPt}<&l(*zD
z(`bImS5hZYK9Rg^lFVaFX32bvWM`;48`Vl{dwE`UvT&WPZ0F>-x=?MMIb)B_h~RmN
z?<Zy|!AB-T;Lt+w>ct<)cp#3JoexvNS5H)0ADo$2a4te;iyteGZkN&`KVC}peVX+7
zUY;p3ADI;SGqt~QUk+(D8A&gqAxm?xSpftG>`4kNyI~Ti{g+>+-ILdzNqJ2Ilh?UD
zg<4C4)A0U0)Y_wEm6t7jDL*P!2U7Daq*p~Z42MbvW^JGtS2gA7X_1!U=&JPLBn`dK
zY3$x#P0N%oE<DZOqCuCBFWT{a8T7LCikD%ND$V^oI5{6PiBLqbN`q3<5IvM;o@jQK
zXRJK^%@LV!s=n-*;m(aRmEwbeI+C1OjjqU8#h$MO2T2Q)cB}_d3pY}9lGf6-)$+@_
z0k>9Xiy!$@@=1OxH(s%;t@G#Yagq&2GtYE1E$Y6^i=DkJ&2*AOCnx>o22Hs1ZfClm
zu-y~~zce}U<Yrp1YZUTO*?XP(8h^mIo#z(aut5L<5P$##AOHafKmY;|fB*y_@ZS?C
z+a<fopZ`nFAC~hMJz#?X1Rwwb2tWV=5P$##AOHafK;X?3Xq3ulo%}Bz?>csQxt0A1
z;JwX@n-}X;tCI87a-KSWznK+8sUZLX2tWV=5P$##AOHafKmY;|$ORhZvv&S1K&f6{
zu4P{WnE(Gjv79G)NIXLT0uX=z1Rwwb2tWV=5P$##AW)n@&t9;`*Dqgcf2HX<2<_-@
zI2aD3)a@&woA@e-b^8-9{xs>fyK-M?x&nmPi^9FO+LQihsC1hyqd_0?clnA$omgoK
zfA|PpFM)smx1FCYx?zI=1Rwwb2tWV=5P$##AOHafK%jVm-op5L?ce_VkL&*x->}FL
z0uX=z1Rwwb2tWV=5P$##AW)z{*)Cg6^ZWk`%Xv{?&d3S^5P$##AOHafKmY;|fB*y_
z0D-~;%H?Lw{Qm#Ua-J2IEi!@t1Rwwb2tWV=5P$##AOHafK%gLjioH;)o%+o-zyCkC
zoaY5)jBFqP0SG_<0uX=z1Rwwb2tWV=5GYb$fxZDqfB&~m6`3vaf&c^{009U<00Izz
z00bZa0SG{#XaWBHAD{n=R!4pifB*y_009U<00Izz00bZafno)4{r_UUksAac009U<
W00Izz00bZa0SG{#V1e}d|Nj6bO?%Y<

diff --git a/examples/analytical-function/example_analytical_function.py b/examples/analytical-function/example_analytical_function.py
index 37900127a..52e7731b5 100644
--- a/examples/analytical-function/example_analytical_function.py
+++ b/examples/analytical-function/example_analytical_function.py
@@ -141,7 +141,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     ExpDesign.method = 'sequential'
-    ExpDesign.n_init_samples = 140#00#3*ndim
+    ExpDesign.n_init_samples = 100#3*ndim
 
     # Sampling methods
     # 1) random 2) latin_hypercube 3) sobol 4) halton 5) hammersley
@@ -227,8 +227,7 @@ if __name__ == "__main__":
     MetaModelOpts.ExpDesign = ExpDesign
     engine = Engine(MetaModelOpts, Model, ExpDesign)
     engine.start_engine()
-    #engine.train_sequential()
-    engine.train_normal()
+    engine.train_sequential()
 
     # Load the objects
     # with open(f"PCEModel_{Model.name}.pkl", "rb") as input:
@@ -267,12 +266,12 @@ if __name__ == "__main__":
 
     # BayesOpts.selected_indices = [0, 3, 5,  7, 9]
     # BME Bootstrap
-    BayesOpts.bootstrap = True
-    BayesOpts.n_bootstrap_itrs = 500
-    BayesOpts.bootstrap_noise = 100
+    # BayesOpts.bootstrap = True
+    # BayesOpts.n_bootstrap_itrs = 500
+    # BayesOpts.bootstrap_noise = 100
 
     # Bayesian cross validation
-    BayesOpts.bayes_loocv = True  # TODO: test what this does
+    # BayesOpts.bayes_loocv = True
 
     # Select the inference method
     import emcee
@@ -297,34 +296,31 @@ if __name__ == "__main__":
     BayesOpts.Discrepancy = DiscrepancyOpts
 
     # -- (Option C) --
-    if 0:
-        DiscOutputOpts = Input()
-        # # # OutputName = 'Z'
-        DiscOutputOpts.add_marginals()
-        DiscOutputOpts.Marginals[0].Nnme = '$\sigma^2_{\epsilon}$'
-        DiscOutputOpts.Marginals[0].dist_type = 'uniform'
-        DiscOutputOpts.Marginals[0].parameters =  [0, 10]
-        #BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
-        #                          'infer': Discrepancy(DiscOutputOpts)}
-    
-        BayesOpts.bias_inputs = {'Z':np.arange(0, 10, 1.).reshape(-1,1) / 9}
-        
-        DiscOutputOpts = Input()
-        # OutputName = 'lambda'
-        DiscOutputOpts.add_marginals()
-        DiscOutputOpts.Marginals[0].name = '$\lambda$'
-        DiscOutputOpts.Marginals[0].dist_type = 'uniform'
-        DiscOutputOpts.Marginals[0].parameters = [0, 1]
-    
-        # # OutputName = 'sigma_f'
-        DiscOutputOpts.add_marginals()
-        DiscOutputOpts.Marginals[1].Name = '$\sigma_f$'
-        DiscOutputOpts.Marginals[1].dist_type = 'uniform'
-        DiscOutputOpts.Marginals[1].parameters = [0, 1e-4]
-        #BayesOpts.Discrepancy = Discrepancy(DiscOutputOpts)
-        BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
-                                  'infer': Discrepancy(DiscOutputOpts)}
-    
+    # DiscOutputOpts = Input()
+    # # # OutputName = 'Z'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[0].Nnme = '$\sigma^2_{\epsilon}$'
+    # DiscOutputOpts.Marginals[0].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[0].parameters =  [0, 10]
+    # BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
+    #                           'infer': Discrepancy(DiscOutputOpts)}
+
+    # BayesOpts.bias_inputs = {'Z':np.arange(0, 10, 1.).reshape(-1,1) / 9}
+    # DiscOutputOpts = Input()
+    # # OutputName = 'lambda'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[0].name = '$\lambda$'
+    # DiscOutputOpts.Marginals[0].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[0].parameters = [0, 1]
+
+    # # OutputName = 'sigma_f'
+    # DiscOutputOpts.add_marginals()
+    # DiscOutputOpts.Marginals[1].Name = '$\sigma_f$'
+    # DiscOutputOpts.Marginals[1].dist_type = 'uniform'
+    # DiscOutputOpts.Marginals[1].parameters = [0, 1e-4]
+    # BayesOpts.Discrepancy = Discrepancy(DiscOutputOpts)
+    # BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
+    #                           'infer': Discrepancy(DiscOutputOpts)}
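
A minimal sketch (not part of the patch) of the Option C setup kept in the commented block above: it reuses the names, distributions, bounds and the `bias_inputs` grid from those comments, assumes the `Input`/`Discrepancy` imports and the `BayesOpts`/`DiscrepancyOpts` objects already defined in this example script, and writes the misspelled `Nnme` attribute as `name`, matching the other marginals.

DiscOutputOpts = Input()
DiscOutputOpts.add_marginals()
DiscOutputOpts.Marginals[0].name = '$\sigma^2_{\epsilon}$'
DiscOutputOpts.Marginals[0].dist_type = 'uniform'
DiscOutputOpts.Marginals[0].parameters = [0, 10]

# Inputs of the bias model plus the combination of known and inferred discrepancy
BayesOpts.bias_inputs = {'Z': np.arange(0, 10, 1.).reshape(-1, 1) / 9}
BayesOpts.Discrepancy = {'known': DiscrepancyOpts,
                         'infer': Discrepancy(DiscOutputOpts)}
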
     # Start the calibration/inference
     Bayes_PCE = BayesOpts.create_inference()
 
diff --git a/examples/model-comparison/example_model_comparison.py b/examples/model-comparison/example_model_comparison.py
index d678898c1..ebd80fea8 100644
--- a/examples/model-comparison/example_model_comparison.py
+++ b/examples/model-comparison/example_model_comparison.py
@@ -23,7 +23,6 @@ import pandas as pd
 import joblib
 import sys
 sys.path.append("../../src/bayesvalidrox/")
-sys.path.append("../../src/")
 
 from bayesvalidrox.pylink.pylink import PyLinkForwardModel
 from bayesvalidrox.surrogate_models.inputs import Input
@@ -39,6 +38,7 @@ from bayes_inference.bayes_model_comparison import BayesModelComparison
 from bayesvalidrox.surrogate_models.engine import Engine
 
 if __name__ == "__main__":
+
     # Read data
     sigma = 0.6
     data = {
@@ -277,10 +277,10 @@ if __name__ == "__main__":
         "cosine": NL4_engine
         }
 
-    # BME Bootstrap options
+    # BME Bootstrap optuions
     opts_bootstrap = {
         "bootstrap": True,
-        "n_samples": 100,#0,#0, # TODO: difference between this and the n_bootstrap set below?
+        "n_samples": 10000,
         "Discrepancy": DiscrepancyOpts,
         "emulator": True,
         "plot_post_pred": False
@@ -289,10 +289,10 @@ if __name__ == "__main__":
     # Run model comparison
     BayesOpts = BayesModelComparison(
         justifiability=True,
-        n_bootstrap=100,#0,#00,
-        #just_n_meas=2
+        n_bootstarp=100,#00,
+        just_n_meas=2
         )
-    output_dict = BayesOpts.model_comparison_all(
+    output_dict = BayesOpts.create_model_comparison(
         meta_models,
         opts_bootstrap
         )
diff --git a/src/bayesvalidrox.egg-info/SOURCES.txt b/src/bayesvalidrox.egg-info/SOURCES.txt
index 344e98406..d6619704e 100644
--- a/src/bayesvalidrox.egg-info/SOURCES.txt
+++ b/src/bayesvalidrox.egg-info/SOURCES.txt
@@ -29,13 +29,10 @@ src/bayesvalidrox/surrogate_models/exploration.py
 src/bayesvalidrox/surrogate_models/glexindex.py
 src/bayesvalidrox/surrogate_models/input_space.py
 src/bayesvalidrox/surrogate_models/inputs.py
-src/bayesvalidrox/surrogate_models/meta_model_engine.py
 src/bayesvalidrox/surrogate_models/orthogonal_matching_pursuit.py
 src/bayesvalidrox/surrogate_models/reg_fast_ard.py
 src/bayesvalidrox/surrogate_models/reg_fast_laplace.py
-src/bayesvalidrox/surrogate_models/sequential_design.py
 src/bayesvalidrox/surrogate_models/surrogate_models.py
-tests/test_BayesModelComparison.py
 tests/test_Discrepancy.py
 tests/test_ExpDesign.py
 tests/test_Input.py
diff --git a/src/bayesvalidrox/bayes_inference/bayes_inference.py b/src/bayesvalidrox/bayes_inference/bayes_inference.py
index c7cfe78cd..1898a8ae6 100644
--- a/src/bayesvalidrox/bayes_inference/bayes_inference.py
+++ b/src/bayesvalidrox/bayes_inference/bayes_inference.py
@@ -1,25 +1,25 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-import copy
-import gc
-import multiprocessing
-import os
-
-import corner
-import h5py
-import matplotlib.lines as mlines
-import matplotlib.pylab as plt
 import numpy as np
+import os
+import copy
 import pandas as pd
+from tqdm import tqdm
+from scipy import stats
 import scipy.linalg as spla
+import joblib
 import seaborn as sns
-from matplotlib.backends.backend_pdf import PdfPages
-from matplotlib.patches import Patch
-from scipy import stats
-from sklearn import preprocessing
+import corner
+import h5py
+import multiprocessing
+import gc
 from sklearn.metrics import mean_squared_error, r2_score
-from tqdm import tqdm
+from sklearn import preprocessing
+from matplotlib.patches import Patch
+import matplotlib.lines as mlines
+from matplotlib.backends.backend_pdf import PdfPages
+import matplotlib.pylab as plt
 
 from .mcmc import MCMC
 
@@ -28,92 +28,6 @@ plt.style.use(os.path.join(os.path.split(__file__)[0],
                            '../', 'bayesvalidrox.mplstyle'))
 
 
-# -------------------------------------------------------------------------
-def _kernel_rbf(X, hyperparameters):
-    """
-    Isotropic squared exponential kernel.
-
-    Higher l values lead to smoother functions and therefore to coarser
-    approximations of the training data. Lower l values make functions
-    more wiggly with wide uncertainty regions between training data points.
-
-    sigma_f controls the marginal variance of b(x)
-
-    Parameters
-    ----------
-    X : ndarray of shape (n_samples_X, n_features)
-
-    hyperparameters : Dict
-        Lambda characteristic length
-        sigma_f controls the marginal variance of b(x)
-        sigma_0 unresolvable error nugget term, interpreted as random
-                error that cannot be attributed to measurement error.
-    Returns
-    -------
-    var_cov_matrix : ndarray of shape (n_samples_X,n_samples_X)
-        Kernel k(X, X).
-
-    """
-    from sklearn.gaussian_process.kernels import RBF
-    min_max_scaler = preprocessing.MinMaxScaler()
-    X_minmax = min_max_scaler.fit_transform(X)
-
-    nparams = len(hyperparameters)
-    if nparams < 3:
-        raise AttributeError('Provide 3 parameters for the RBF kernel!')
-
-    # characteristic length (0,1]
-    Lambda = hyperparameters[0]
-    # sigma_f controls the marginal variance of b(x)
-    sigma2_f = hyperparameters[1]
-
-    rbf = RBF(length_scale=Lambda)
-    cov_matrix = sigma2_f * rbf(X_minmax)
-
-    # (unresolvable error) nugget term that is interpreted as random
-    # error that cannot be attributed to measurement error.
-    sigma2_0 = hyperparameters[2:]
-    for i, j in np.ndindex(cov_matrix.shape):
-        cov_matrix[i, j] += np.sum(sigma2_0) if i == j else 0
-
-    return cov_matrix
-
-
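
As a standalone illustration (not part of the patch), the covariance assembled by `_kernel_rbf` can be reproduced directly with scikit-learn; the three hyperparameter values below are made up for the example.

import numpy as np
from sklearn import preprocessing
from sklearn.gaussian_process.kernels import RBF

X = np.linspace(0.0, 10.0, 6).reshape(-1, 1)        # toy bias inputs
length_scale, sigma2_f, sigma2_0 = 0.5, 1.0, 1e-4    # Lambda, sigma_f^2, nugget (illustrative)

X_minmax = preprocessing.MinMaxScaler().fit_transform(X)
cov = sigma2_f * RBF(length_scale=length_scale)(X_minmax)
cov[np.diag_indices_from(cov)] += sigma2_0           # nugget term on the diagonal
print(cov.shape)                                     # (6, 6)
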
-# -------------------------------------------------------------------------
-def _logpdf(x, mean, cov):
-    """
-    Computes the likelihood based on a multivariate normal distribution.
-
-    Parameters
-    ----------
-    x : TYPE
-        DESCRIPTION.
-    mean : array_like
-        Observation data.
-    cov : 2d array
-        Covariance matrix of the distribution.
-
-    Returns
-    -------
-    log_lik : float
-        Log likelihood.
-
-    """
-
-        # Transform into np arrays
-    x = np.array(x)
-    mean = np.array(mean)
-    cov = np.array(cov)
-
-    n = len(mean)
-    L = spla.cholesky(cov, lower=True)
-    beta = np.sum(np.log(np.diag(L)))
-    dev = x - mean
-    alpha = dev.dot(spla.cho_solve((L, True), dev))
-    log_lik = -0.5 * alpha - beta - n / 2. * np.log(2 * np.pi)
-    return log_lik
-
-
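
A minimal sketch (not part of the patch) cross-checking the Cholesky-based log-likelihood computed by `_logpdf` against scipy's reference implementation, using a small synthetic covariance.

import numpy as np
import scipy.linalg as spla
from scipy import stats

rng = np.random.default_rng(0)
mean = rng.normal(size=4)
A = rng.normal(size=(4, 4))
cov = A @ A.T + 4 * np.eye(4)          # symmetric positive definite
x = rng.normal(size=4)

# Same algebra as _logpdf: log|cov| via the Cholesky factor, Mahalanobis term via cho_solve
L = spla.cholesky(cov, lower=True)
beta = np.sum(np.log(np.diag(L)))
dev = x - mean
alpha = dev.dot(spla.cho_solve((L, True), dev))
log_lik = -0.5 * alpha - beta - len(mean) / 2.0 * np.log(2 * np.pi)

assert np.isclose(log_lik, stats.multivariate_normal.logpdf(x, mean, cov))
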
 class BayesInference:
     """
     A class to perform Bayesian Analysis.
@@ -128,7 +42,7 @@ class BayesInference:
         of the variance matrix for a multivariate normal likelihood.
     name : str, optional
         The type of analysis, either calibration (`Calib`) or validation
-        (`Valid`). The default is `'Calib'`. # TODO: what is going on here for validation?
+        (`Valid`). The default is `'Calib'`.
     emulator : bool, optional
         Analysis with emulator (MetaModel). The default is `True`.
     bootstrap : bool, optional
@@ -141,11 +55,11 @@ class BayesInference:
         A dictionary with the selected indices of each model output. The
         default is `None`. If `None`, all measurement points are used in the
         analysis.
-    prior_samples : array of shape (n_samples, n_params), optional
+    samples : array of shape (n_samples, n_params), optional
         The samples to be used in the analysis. The default is `None`. If
         None, the samples are drawn from the probabilistic input parameter
         object of the MetaModel object.
-    n_prior_samples : int, optional
+    n_samples : int, optional
         Number of samples to be used in the analysis. The default is `500000`.
         If samples is not `None`, this argument will be assigned based on the
         number of samples given.
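
A minimal construction sketch (not part of the patch) using keyword names from the restored signature shown in the next hunk; `engine` and `DiscrepancyOpts` are assumed to be set up as in the analytical-function example above, and all values are illustrative.

BayesOpts = BayesInference(
    engine,
    discrepancy=DiscrepancyOpts,
    name='Calib',
    emulator=True,
    bootstrap=True,
    n_bootstrap_itrs=500,
    inference_method='rejection',
)
Bayes_PCE = BayesOpts.create_inference()
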
@@ -211,32 +125,26 @@ class BayesInference:
 
     """
 
-    def __init__(self, engine, discrepancy=None, emulator=True,
+    def __init__(self, engine, MetaModel = None, discrepancy=None, emulator=True,
                  name='Calib', bootstrap=False, req_outputs=None,
-                 selected_indices=None, prior_samples=None, n_prior_samples=100000,
+                 selected_indices=None, samples=None, n_samples=100000,
                  measured_data=None, inference_method='rejection',
                  mcmc_params=None, bayes_loocv=False, n_bootstrap_itrs=1,
                  perturbed_data=[], bootstrap_noise=0.05, just_analysis=False,
                  valid_metrics=['BME'], plot_post_pred=True,
                  plot_map_pred=False, max_a_posteriori='mean',
-                 corner_title_fmt='.2e', out_dir='', bmc=False):
-
-        self.log_BME_tom = None
-        self.inf_entropy = None
-        self.log_BME = None
-        self.KLD = None
-        self.__mean_pce_prior_pred = None
-        if perturbed_data is None:
-            perturbed_data = []
+                 corner_title_fmt='.2e'):
+
         self.engine = engine
+        self.MetaModel = engine.MetaModel
         self.Discrepancy = discrepancy
         self.emulator = emulator
         self.name = name
         self.bootstrap = bootstrap
         self.req_outputs = req_outputs
         self.selected_indices = selected_indices
-        self.prior_samples = prior_samples
-        self.n_prior_samples = n_prior_samples
+        self.samples = samples
+        self.n_samples = n_samples
         self.measured_data = measured_data
         self.inference_method = inference_method
         self.mcmc_params = mcmc_params
@@ -250,63 +158,44 @@ class BayesInference:
         self.plot_map_pred = plot_map_pred
         self.max_a_posteriori = max_a_posteriori
         self.corner_title_fmt = corner_title_fmt
-        self.out_dir = out_dir
-
-        # Other properties and parameters (found in code, but never set)
-        self.error_model = False  # TODO: no example or use case for this!
-        self.bias_inputs = None
-        self.measurement_error = None  # TODO: what is this?
-        self.sigma2s = None
-        self.log_likes = None
-        self.n_tot_measurement = None
-        self.Discrepancy = None
-        self.posterior_df = None
-        self.error_MetaModel = None
-        self._mean_pce_prior_pred = None
-        self._std_pce_prior_pred = None
-        self.__model_prior_pred = None
-        self.bmc = bmc  # Set to True, if you want to cut short to only Model Comparison
-
-        # System settings
-        if os.name == 'nt':
-            print('')
-            print('WARNING: Performing the inference on windows can lead to reduced accuracy!')
-            print('')
-            self.dtype = np.longdouble
-        else:
-            self.dtype = np.float128
 
-    def setup_inference(self):
+    # -------------------------------------------------------------------------
+    def create_inference(self):
         """
-        This function sets up the inference by checking the inputs and getting 
-        needed data.
+        Starts the inference.
+
+        Returns
+        -------
+        BayesInference : obj
+            The Bayes inference object.
+
         """
-        Model = self.engine.Model
 
-        # Create output directory
-        if self.out_dir == '':
-            self.out_dir = f'Outputs_Bayes_{self.engine.Model.name}_{self.name}'
-        os.makedirs(self.out_dir, exist_ok=True)
+        # Set some variables
+        MetaModel = self.MetaModel
+        Model = self.engine.Model
+        n_params = MetaModel.n_params
+        output_names = Model.Output.names
+        par_names = self.engine.ExpDesign.par_names
 
-        # If the prior is set by the user, take it, else generate from ExpDes
-        if self.prior_samples is None:
-            self.prior_samples = self.engine.ExpDesign.generate_samples(
-                self.n_prior_samples, 'random')
+        # If the prior is set by the user, take it.
+        if self.samples is None:
+            self.samples = self.engine.ExpDesign.generate_samples(
+                self.n_samples, 'random')
         else:
             try:
-                samples = self.prior_samples.values
+                samples = self.samples.values
             except AttributeError:
-                samples = self.prior_samples
+                samples = self.samples
 
             # Take care of an additional Sigma2s
-            self.prior_samples = samples[:, :self.engine.MetaModel.n_params]
+            self.samples = samples[:, :n_params]
 
             # Update number of samples
-            self.n_prior_samples = self.prior_samples.shape[0]
+            self.n_samples = self.samples.shape[0]
 
         # ---------- Preparation of observation data ----------
-        # Read observation data 
-        # TODO: later use valid #of measurements. but here only get the model observations?
+        # Read observation data and perturb it if requested.
         if self.measured_data is None:
             self.measured_data = Model.read_observation(case=self.name)
         # Convert measured_data to a data frame
@@ -316,13 +205,11 @@ class BayesInference:
         # Extract the total number of measurement points
         if self.name.lower() == 'calib':
             self.n_tot_measurement = Model.n_obs
-        elif self.name.lower() == 'valid':
-            self.n_tot_measurement = Model.n_obs_valid
         else:
-            raise AttributeError('The set inference type is not known! Use either `calib` or `valid`')
+            self.n_tot_measurement = Model.n_obs_valid
 
         # Find measurement error (if not given) for post predictive plot
-        if self.measurement_error is None:
+        if not hasattr(self, 'measurement_error'):
             if isinstance(self.Discrepancy, dict):
                 Disc = self.Discrepancy['known']
             else:
@@ -335,368 +222,383 @@ class BayesInference:
                     self.measurement_error = np.sqrt(Disc.parameters)
                 except TypeError:
                     pass
-        # TODO: need a transformation for given measurement error?
 
-        # Get Discrepancy type
+        # ---------- Preparation of variance for covariance matrix ----------
+        # Independent and identically distributed
+        total_sigma2 = dict()
         opt_sigma_flag = isinstance(self.Discrepancy, dict)
         opt_sigma = None
-        # Option A: known error with unknown bias term
-        if opt_sigma_flag and opt_sigma is None:
-            opt_sigma = 'A'
-        # Option B: The sigma2 is known (no bias term)
-        elif self.Discrepancy.parameters is not None:
-            opt_sigma = 'B'
-        # Option C: The sigma2 is unknown (bias term including error)
-        elif not isinstance(self.Discrepancy.InputDisc, str):
-            opt_sigma = 'C'
-        self.Discrepancy.opt_sigma = opt_sigma
-
-        # Set MCMC params if used
-        if self.inference_method.lower() == 'mcmc':
-            if self.mcmc_params is None:
-                self.mcmc_params = {}
-            par_list = ['init_samples', 'n_walkers', 'n_burn', 'n_steps',
-                        'moves', 'multiprocessing', 'verbose']
-            init_val = [None, 100, 200, 100000, None, False, False]
-            for i in range(len(par_list)):
-                if par_list[i] not in list(self.mcmc_params.keys()):
-                    self.mcmc_params[par_list[i]] = init_val[i]
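
For reference, a sketch (not part of the patch) of overriding the MCMC defaults listed in the removed block above; any key omitted from `mcmc_params` falls back to those defaults, and the numbers here are illustrative.

BayesOpts.inference_method = 'MCMC'
BayesOpts.mcmc_params = {
    'n_walkers': 50,
    'n_burn': 500,
    'n_steps': 20000,
    'multiprocessing': False,
    'verbose': False,
}
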
+        for key_idx, key in enumerate(output_names):
 
-    # -------------------------------------------------------------------------
-    def create_inference(self):
-        """
-        Starts the inference.
-
-        Returns
-        -------
-        BayesInference : obj
-            The Bayes inference object.
-            
-        # TODO: should this function really return the class?
-
-        """
-        # Do general set up and check some parameters
-        self.setup_inference()
-
-        # ---------- Preparation of variance for covariance matrix ----------
-        # Independent and identically distributed # TODO: ??
-        total_sigma2 = dict()
-        opt_sigma = self.Discrepancy.opt_sigma
-        for key_idx, key in enumerate(self.engine.Model.Output.names):
             # Find opt_sigma
-            sigma2 = None
-            if opt_sigma == 'A':
+            if opt_sigma_flag and opt_sigma is None:
+                # Option A: known error with unknown bias term
+                opt_sigma = 'A'
                 known_discrepancy = self.Discrepancy['known']
                 self.Discrepancy = self.Discrepancy['infer']
                 sigma2 = np.array(known_discrepancy.parameters[key])
 
-            elif opt_sigma == 'B':
-                sigma2 = np.array(self.Discrepancy.parameters[key])
+            elif opt_sigma == 'A' or self.Discrepancy.parameters is not None:
+                # Option B: The sigma2 is known (no bias term)
+                if opt_sigma == 'A':
+                    sigma2 = np.array(known_discrepancy.parameters[key])
+                else:
+                    opt_sigma = 'B'
+                    sigma2 = np.array(self.Discrepancy.parameters[key])
 
-            elif opt_sigma == 'C':
+            elif not isinstance(self.Discrepancy.InputDisc, str):
+                # Option C: The sigma2 is unknown (bias term including error)
+                opt_sigma = 'C'
+                self.Discrepancy.opt_sigma = opt_sigma
                 n_measurement = self.measured_data[key].values.shape
                 sigma2 = np.zeros((n_measurement[0]))
+
             total_sigma2[key] = sigma2
 
-        self.Discrepancy.total_sigma2 = total_sigma2
+            self.Discrepancy.opt_sigma = opt_sigma
+            self.Discrepancy.total_sigma2 = total_sigma2
 
         # If inferred sigma2s obtained from e.g. calibration are given
         try:
-            self.sigma2s = self.Discrepancy.get_sample(self.n_prior_samples)
+            self.sigma2s = self.Discrepancy.get_sample(self.n_samples)
         except:
-            pass  # TODO: should an error be raised in this case? Should this at least be checked against opt_sigma?
+            pass
 
         # ---------------- Bootstrap & TOM --------------------
         if self.bootstrap or self.bayes_loocv or self.just_analysis:
-            self.perform_bootstrap(total_sigma2)
-            if self.bmc:
-                return self
-        else:
-            print('No bootstrap for TOM performed!')  # TODO: stop the code? Use n_bootstrap = 1?
+            if len(self.perturbed_data) == 0:
+                # Adding some zero-mean noise to the observation function
+                self.perturbed_data = self._perturb_data(
+                    self.measured_data, output_names
+                    )
+            else:
+                self.n_bootstrap_itrs = len(self.perturbed_data)
 
-        # ---------------- Parameter Bayesian inference ----------------
-        # Convert to a dataframe if samples are provided after calibration.
-        MCMC_Obj = None
-        if self.name.lower() == 'valid':
-            self.posterior_df = pd.DataFrame(self.prior_samples, columns=self.engine.ExpDesign.par_names)
-        # Instantiate the MCMC object
-        elif self.inference_method.lower() == 'mcmc':
-            MCMC_Obj = MCMC(self)
-            self.posterior_df = MCMC_Obj.run_sampler(
-                self.measured_data, total_sigma2
-            )
-        # Rejection sampling
-        elif self.inference_method.lower() == 'rejection':
-            self.posterior_df = self._rejection_sampling()
-        else:
-            raise AttributeError('The chosen inference method is not available!')
+            # -------- Model Discrepancy -----------
+            if hasattr(self, 'error_model') and self.error_model \
+               and self.name.lower() != 'calib':
+                # Select posterior mean as MAP
+                MAP_theta = self.samples.mean(axis=0).reshape((1, n_params))
+                # MAP_theta = stats.mode(self.samples,axis=0)[0]
 
-        # Provide posterior's summary
-        print('\n')
-        print('-' * 15 + 'Posterior summary' + '-' * 15)
-        pd.options.display.max_columns = None
-        pd.options.display.max_rows = None
-        print(self.posterior_df.describe())
-        print('-' * 50)
+                # Evaluate the (meta-)model at the MAP
+                y_MAP, y_std_MAP = MetaModel.eval_metamodel(samples=MAP_theta)
 
-        # -------- Model Discrepancy -----------
-        if self.error_model and self.name.lower() == 'calib' and MCMC_Obj is not None:  # TODO: where is this used
-            # and what does it actually do there?
-            self.create_error_model(opt_sigma=opt_sigma,
-                                    type_='posterior', sampler=MCMC_Obj)
+                # Train a GPR meta-model using MAP
+                self.error_MetaModel = MetaModel.create_model_error(
+                    self.bias_inputs, y_MAP, Name=self.name
+                    )
 
-        # -------- Posterior predictive -----------
-        self._posterior_predictive()
+            # -----------------------------------------------------
+            # ----- Loop over the perturbed observation data ------
+            # -----------------------------------------------------
+            # Initialize arrays
+            logLikelihoods = np.zeros((self.n_samples, self.n_bootstrap_itrs),
+                                      dtype=np.float16)
+            BME_Corr = np.zeros((self.n_bootstrap_itrs))
+            log_BME = np.zeros((self.n_bootstrap_itrs))
+            KLD = np.zeros((self.n_bootstrap_itrs))
+            inf_entropy = np.zeros((self.n_bootstrap_itrs))
+
+            # Compute the prior predictions
+            # Evaluate the MetaModel
+            if self.emulator:
+                y_hat, y_std = MetaModel.eval_metamodel(samples=self.samples)
+                self.__mean_pce_prior_pred = y_hat
+                self._std_pce_prior_pred = y_std
 
-        # ------------------ Visualization --------------------
-        # Posterior parameters
-        self.plot_post_params(opt_sigma)
+                # Correct the predictions with Model discrepancy
+                if hasattr(self, 'error_model') and self.error_model:
+                    y_hat_corr, y_std = self.error_MetaModel.eval_model_error(
+                        self.bias_inputs, self.__mean_pce_prior_pred
+                        )
+                    self.__mean_pce_prior_pred = y_hat_corr
+                    self._std_pce_prior_pred = y_std
 
-        # Plot MAP
-        if self.plot_map_pred:
-            self._plot_max_a_posteriori()
+                # Surrogate model's error using RMSE of test data
+                if hasattr(MetaModel, 'rmse'):
+                    surrError = MetaModel.rmse
+                else:
+                    surrError = None
 
-        # Plot log_BME dist
-        if self.bootstrap:
-            self.plot_log_BME()
+            else:
+                # Evaluate the original model
+                self.__model_prior_pred = self._eval_model(
+                    samples=self.samples, key='PriorPred'
+                    )
+                surrError = None
 
-        # Plot posterior predictive
-        if self.plot_post_pred:
-            self._plot_post_predictive()
+            # Start the likelihood-BME computations for the perturbed data
+            for itr_idx, data in tqdm(
+                    enumerate(self.perturbed_data),
+                    total=self.n_bootstrap_itrs,
+                    desc="Bootstrapping the BME calculations", ascii=True
+                    ):
 
-        return self
+                # ---------------- Likelihood calculation ----------------
+                if self.emulator:
+                    model_evals = self.__mean_pce_prior_pred
+                else:
+                    model_evals = self.__model_prior_pred
+
+                # Leave one out
+                if self.bayes_loocv or self.just_analysis:
+                    self.selected_indices = np.nonzero(data)[0]
+
+                # Prepare data dataframe
+                nobs = list(self.measured_data.count().values[1:])
+                numbers = list(np.cumsum(nobs))
+                indices = list(zip([0] + numbers, numbers))
+                data_dict = {
+                    output_names[i]: data[j:k] for i, (j, k) in
+                    enumerate(indices)
+                    }
+                #print(output_names)
+                #print(indices)
+                #print(numbers)
+                #print(nobs)
+                #print(self.measured_data)
+                #for i, (j, k) in enumerate(indices):
+                #    print(i,j,k)
+                #print(data)
+                #print(data_dict)
+                #stop
+
+                # Unknown sigma2
+                if opt_sigma == 'C' or hasattr(self, 'sigma2s'):
+                    logLikelihoods[:, itr_idx] = self.normpdf(
+                        model_evals, data_dict, total_sigma2,
+                        sigma2=self.sigma2s, std=surrError
+                        )
+                else:
+                    # known sigma2
+                    logLikelihoods[:, itr_idx] = self.normpdf(
+                        model_evals, data_dict, total_sigma2,
+                        std=surrError
+                        )
 
-    def create_error_model(self, type_='posterior', opt_sigma='B', sampler=None):
-        """
-        Creates an error model in the engine.MetaModel based on input
-        distribution samples of the chosen type.
+                # ---------------- BME Calculations ----------------
+                # BME (log)
+                log_BME[itr_idx] = np.log(
+                    np.nanmean(np.exp(logLikelihoods[:, itr_idx],
+                                      dtype=np.longdouble))#float128))
+                    )
 
-        Parameters
-        ----------
-        opt_sigma : string, optional
-            Type of uncertainty description, only used if type_=='posterior'.
-            The default is 'B'
-        type_ : string
-            Type of parameter samples to use, either 'prior' or 'posterior'. 
-            The default is 'posterior'.
-        sampler : MCMC, optional
-            Should be an MCMC object if type_=='posterior' and MCMC is used in
-            the inference. In all other cases this parameter is not needed.
+                # BME correction when using Emulator
+                if self.emulator:
+                    BME_Corr[itr_idx] = self.__corr_factor_BME(
+                        data_dict, total_sigma2, log_BME[itr_idx]
+                        )
 
-        Returns
-        -------
-        None.
+                # Rejection Step
+                if 'kld' in list(map(str.lower, self.valid_metrics)) and\
+                   'inf_entropy' in list(map(str.lower, self.valid_metrics)):
+                    # Random numbers between 0 and 1
+                    unif = np.random.rand(1, self.n_samples)[0]
+
+                    # Reject the poorly performed prior
+                    Likelihoods = np.exp(logLikelihoods[:, itr_idx],
+                                         dtype=np.float64)
+                    accepted = (Likelihoods/np.max(Likelihoods)) >= unif
+                    posterior = self.samples[accepted]
+
+                    # Posterior-based expectation of likelihoods
+                    postExpLikelihoods = np.mean(
+                        logLikelihoods[:, itr_idx][accepted]
+                        )
 
-        """
-        n_params = self.engine.MetaModel.n_params
+                    # Calculate Kullback-Leibler Divergence
+                    KLD[itr_idx] = postExpLikelihoods - log_BME[itr_idx]
+
+                # Posterior-based expectation of prior densities
+                if 'inf_entropy' in list(map(str.lower, self.valid_metrics)):
+                    n_thread = int(0.875 * multiprocessing.cpu_count())
+                    with multiprocessing.Pool(n_thread) as p:
+                        postExpPrior = np.mean(np.concatenate(
+                            p.map(
+                                self.engine.ExpDesign.JDist.pdf,
+                                np.array_split(posterior.T, n_thread, axis=1))
+                            )
+                            )
+                    # Information Entropy based on Entropy paper Eq. 38
+                    inf_entropy[itr_idx] = log_BME[itr_idx] - postExpPrior - \
+                        postExpLikelihoods
+
+                # Clear memory
+                gc.collect(generation=2)
+
+            # ---------- Store metrics for perturbed data set ----------------
+            # Likelihoods (Size: n_samples, n_bootstrap_itr)
+            self.log_likes = logLikelihoods
+
+            # BME (log), KLD, infEntropy (Size: 1,n_bootstrap_itr)
+            self.log_BME = log_BME
+
+            # BMECorrFactor (log) (Size: 1,n_bootstrap_itr)
+            if self.emulator:
+                self.log_BME_corr_factor = BME_Corr
+
+            if 'kld' in list(map(str.lower, self.valid_metrics)):
+                self.KLD = KLD
+            if 'inf_entropy' in list(map(str.lower, self.valid_metrics)):
+                self.inf_entropy = inf_entropy
+
+            # BME = BME + BMECorrFactor
+            if self.emulator:
+                self.log_BME += self.log_BME_corr_factor
+
+        # ---------------- Parameter Bayesian inference ----------------
+        if self.inference_method.lower() == 'mcmc':
+            # Instantiate the MCMC object
+            MCMC_Obj = MCMC(self)
+            self.posterior_df = MCMC_Obj.run_sampler(
+                self.measured_data, total_sigma2
+                )
 
-        # Get MAP estimate from prior samples
-        if type_ == 'prior':
-            # Select prior ? mean as MAP
-            MAP_theta = self.prior_samples.mean(axis=0).reshape((1, n_params))
+        elif self.name.lower() == 'valid':
+            # Convert to a dataframe if samples are provided after calibration.
+            self.posterior_df = pd.DataFrame(self.samples, columns=par_names)
 
-            # Evaluate the (meta-)model at the MAP
-            y_MAP, y_std_MAP = self.engine.MetaModel.eval_metamodel(samples=MAP_theta)
+        else:
+            # Rejection sampling
+            self.posterior_df = self._rejection_sampling()
 
-            # Train a GPR meta-model using MAP
-            self.error_MetaModel = self.engine.MetaModel.create_model_error(
-                self.bias_inputs, y_MAP, self.measured_data, name=self.name
-            )
+        # Provide posterior's summary
+        print('\n')
+        print('-'*15 + 'Posterior summary' + '-'*15)
+        pd.options.display.max_columns = None
+        pd.options.display.max_rows = None
+        print(self.posterior_df.describe())
+        print('-'*50)
 
-        # Get MAP estimate from posterior samples
-        if type_ == 'posterior':
+        # -------- Model Discrepancy -----------
+        if hasattr(self, 'error_model') and self.error_model \
+           and self.name.lower() == 'calib':
             if self.inference_method.lower() == 'mcmc':
-                self.error_MetaModel = sampler.error_MetaModel
+                self.error_MetaModel = MCMC_Obj.error_MetaModel
             else:
                 # Select posterior mean as MAP
                 if opt_sigma == "B":
                     posterior_df = self.posterior_df.values
                 else:
-                    posterior_df = self.posterior_df.values[:, :-self.engine.Model.n_outputs]
+                    posterior_df = self.posterior_df.values[:, :-Model.n_outputs]
 
                 # Select posterior mean as Maximum a posteriori
                 map_theta = posterior_df.mean(axis=0).reshape((1, n_params))
                 # map_theta = stats.mode(Posterior_df,axis=0)[0]
 
                 # Evaluate the (meta-)model at the MAP
-                y_MAP, y_std_MAP = self.engine.MetaModel.eval_metamodel(samples=map_theta)
+                y_MAP, y_std_MAP = MetaModel.eval_metamodel(samples=map_theta)
 
                 # Train a GPR meta-model using MAP
-                self.error_MetaModel = self.engine.MetaModel.create_model_error(
-                    self.bias_inputs, y_MAP, self.measured_data, name=self.name
+                self.error_MetaModel = MetaModel.create_model_error(
+                    self.bias_inputs, y_MAP, Name=self.name
+                    )
+
+        # -------- Posterior predictive -----------
+        self._posterior_predictive()
+
+        # -----------------------------------------------------
+        # ------------------ Visualization --------------------
+        # -----------------------------------------------------
+        # Create Output directory, if it doesn't exist already.
+        out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
+        os.makedirs(out_dir, exist_ok=True)
+
+        # -------- Posterior parameters --------
+        if opt_sigma != "B":
+            par_names.extend(
+                [self.Discrepancy.InputDisc.Marginals[i].name for i
+                 in range(len(self.Discrepancy.InputDisc.Marginals))]
                 )
+        # Plot with corner
+        figPosterior = corner.corner(self.posterior_df.to_numpy(),
+                                     labels=par_names,
+                                     quantiles=[0.15, 0.5, 0.85],
+                                     show_titles=True,
+                                     title_fmt=self.corner_title_fmt,
+                                     labelpad=0.2,
+                                     use_math_text=True,
+                                     title_kwargs={"fontsize": 28},
+                                     plot_datapoints=False,
+                                     plot_density=False,
+                                     fill_contours=True,
+                                     smooth=0.5,
+                                     smooth1d=0.5)
 
-    def perform_bootstrap(self, total_sigma2):
-        """
-        Perform bootstrap to get TOM (??)
-        
-        Parameters
-        ----------
-        total_sigma2 : dict
-            Dictionary containing the sigma2 for the training(?) data
-        Returns
-        -------
-        None.
+        # Loop over axes and set x limits
+        if opt_sigma == "B":
+            axes = np.array(figPosterior.axes).reshape(
+                (len(par_names), len(par_names))
+                )
+            for yi in range(len(par_names)):
+                ax = axes[yi, yi]
+                ax.set_xlim(self.engine.ExpDesign.bound_tuples[yi])
+                for xi in range(yi):
+                    ax = axes[yi, xi]
+                    ax.set_xlim(self.engine.ExpDesign.bound_tuples[xi])
+        plt.close()
 
-        """
-        MetaModel = self.engine.MetaModel
-        output_names = self.engine.Model.Output.names
-        opt_sigma = self.Discrepancy.opt_sigma
+        # Turn off gridlines
+        for ax in figPosterior.axes:
+            ax.grid(False)
 
-        # Adding some zero mean noise to the observation function
-        if len(self.perturbed_data) == 0:
-            self.perturbed_data = self._perturb_data(
-                self.measured_data, output_names
-            )
+        if self.emulator:
+            plotname = f'/Posterior_Dist_{Model.name}_emulator'
         else:
-            self.n_bootstrap_itrs = len(self.perturbed_data)
+            plotname = f'/Posterior_Dist_{Model.name}'
 
-        # -------- Model Discrepancy -----------
-        if self.error_model and self.name.lower() == 'valid':  # TODO: what should be set so that this is tested?
-            self.create_error_model(type_='prior')
-        # -----------------------------------------------------
-        # ----- Loop over the perturbed observation data ------
-        # -----------------------------------------------------
-        # Initialize arrays
-        logLikelihoods = np.zeros((self.n_prior_samples, self.n_bootstrap_itrs),
-                                  dtype=np.float16)
-        BME_Corr = np.zeros(self.n_bootstrap_itrs)
-        log_BME = np.zeros(self.n_bootstrap_itrs)
-        KLD = np.zeros(self.n_bootstrap_itrs)
-        inf_entropy = np.zeros(self.n_bootstrap_itrs)
-
-        # Compute the prior predictions
-        # Evaluate the MetaModel
-        if self.emulator:
-            y_hat, y_std = MetaModel.eval_metamodel(samples=self.prior_samples)
-            self.__mean_pce_prior_pred = y_hat
-            self._std_pce_prior_pred = y_std
-
-            # Correct the predictions with Model discrepancy
-            if self.error_model:  # TODO this does not check for calib?
-                y_hat_corr, y_std = self.error_MetaModel.eval_model_error(
-                    self.bias_inputs, self.__mean_pce_prior_pred)
-                self.__mean_pce_prior_pred = y_hat_corr
-                self._std_pce_prior_pred = y_std
+        figPosterior.set_size_inches((24, 16))
+        figPosterior.savefig(f'./{out_dir}{plotname}.pdf',
+                             bbox_inches='tight')
 
-            # Surrogate model's error using RMSE of test data
-            if MetaModel.rmse is not None:
-                surrError = MetaModel.rmse
-            else:
-                surrError = None
-            model_evals = self.__mean_pce_prior_pred
+        # -------- Plot MAP --------
+        if self.plot_map_pred:
+            self._plot_max_a_posteriori()
 
-        # Evaluate the model
-        else:
-            self.__model_prior_pred = self._eval_model(
-                samples=self.prior_samples, key='PriorPred')
-            model_evals = self.__model_prior_pred
-            surrError = None
-
-        # Start the likelihood-BME computations for the perturbed data
-        for itr_idx, data in tqdm(
-                enumerate(self.perturbed_data),
-                total=self.n_bootstrap_itrs,
-                desc="Bootstrapping the BME calculations", ascii=True
-        ):
-
-            # ---------------- Likelihood calculation ----------------
-            # Leave one out (see form of perturbed data)
-            if self.bayes_loocv or self.just_analysis:
-                # Consider only non-zero entries
-                self.selected_indices = np.nonzero(data)[0]
-
-            # Prepare data dataframe # TODO: what's with this transformation?
-            nobs = list(self.measured_data.count().values[1:])
-            numbers = list(np.cumsum(nobs))
-            indices = list(zip([0] + numbers, numbers))
-            data_dict = {
-                output_names[i]: data[j:k] for i, (j, k) in
-                enumerate(indices)
-            }
-
-            # Unknown sigma2
-            if opt_sigma == 'C' or self.sigma2s is not None:
-                logLikelihoods[:, itr_idx] = self.normpdf(
-                    model_evals, data_dict, total_sigma2,
-                    sigma2=self.sigma2s, std=surrError
-                )
-            else:
-                # known sigma2
-                logLikelihoods[:, itr_idx] = self.normpdf(
-                    model_evals, data_dict, total_sigma2,
-                    std=surrError
-                )
-            # ---------------- BME Calculations ----------------
-            # BME (log)
-            log_BME[itr_idx] = np.log(
-                np.nanmean(np.exp(logLikelihoods[:, itr_idx],
-                                  dtype=self.dtype))
-            )
-
-            # BME correction when using Emulator
-            if self.emulator:
-                BME_Corr[itr_idx] = self._corr_factor_BME(
-                    data_dict, total_sigma2, log_BME[itr_idx]
-                )
+        # -------- Plot log_BME dist --------
+        if self.bootstrap:
 
-            # Rejection Step
-            if 'kld' in list(map(str.lower, self.valid_metrics)) and \
-                    'inf_entropy' in list(map(str.lower, self.valid_metrics)):  # TODO: why and and not or?
-                # Random numbers between 0 and 1
-                unif = np.random.rand(1, self.n_prior_samples)[0]
-
-                # Reject the poorly performed prior
-                Likelihoods = np.exp(logLikelihoods[:, itr_idx],
-                                     dtype=np.float64)
-                accepted = (Likelihoods / np.max(Likelihoods)) >= unif
-                posterior = self.prior_samples[accepted]
-
-                # Posterior-based expectation of likelihoods
-                postExpLikelihoods = np.mean(
-                    logLikelihoods[:, itr_idx][accepted]
+            # Computing the TOM performance
+            self.log_BME_tom = stats.chi2.rvs(
+                self.n_tot_measurement, size=self.log_BME.shape[0]
                 )
 
-                # Calculate Kullback-Leibler Divergence
-                KLD[itr_idx] = postExpLikelihoods - log_BME[itr_idx]
+            fig, ax = plt.subplots()
+            sns.kdeplot(self.log_BME_tom, ax=ax, color="green", shade=True)
+            sns.kdeplot(
+                self.log_BME, ax=ax, color="blue", shade=True,
+                label='Model BME')
 
-            # Posterior-based expectation of prior densities
-            if 'inf_entropy' in list(map(str.lower, self.valid_metrics)):
-                n_thread = int(0.875 * multiprocessing.cpu_count())
-                with multiprocessing.Pool(n_thread) as p:
-                    postExpPrior = np.mean(np.concatenate(
-                        p.map(
-                            self.engine.ExpDesign.JDist.pdf,
-                            np.array_split(posterior.T, n_thread, axis=1))
-                    )
-                    )
-                # Information Entropy based on Entropy paper Eq. 38
-                inf_entropy[itr_idx] = log_BME[itr_idx] - postExpPrior - postExpLikelihoods
+            ax.set_xlabel('log$_{10}$(BME)')
+            ax.set_ylabel('Probability density')
 
-            # Clear memory
-            gc.collect(generation=2)
+            legend_elements = [
+                Patch(facecolor='green', edgecolor='green', label='TOM BME'),
+                Patch(facecolor='blue', edgecolor='blue', label='Model BME')
+                ]
+            ax.legend(handles=legend_elements)
 
-        # ---------- Store metrics for perturbed data set ----------------
-        # Likelihoods (Size: n_samples, n_bootstrap_itr)
-        self.log_likes = logLikelihoods
+            if self.emulator:
+                plotname = f'/BME_hist_{Model.name}_emulator'
+            else:
+                plotname = f'/BME_hist_{Model.name}'
 
-        # BME (log), KLD, infEntropy (Size: 1,n_bootstrap_itr)
-        self.log_BME = log_BME
+            plt.savefig(f'./{out_dir}{plotname}.pdf', bbox_inches='tight')
+            plt.show()
+            plt.close()
 
-        # BMECorrFactor (log) (Size: 1,n_bootstrap_itr)
-        # BME = BME + BMECorrFactor
-        if self.emulator:
-            self.log_BME += BME_Corr
+        # -------- Posterior predictives --------
+        if self.plot_post_pred:
+            # Plot the posterior predictive
+            self._plot_post_predictive()
 
-        if 'kld' in list(map(str.lower, self.valid_metrics)):
-            self.KLD = KLD
-        if 'inf_entropy' in list(map(str.lower, self.valid_metrics)):
-            self.inf_entropy = inf_entropy
+        return self
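
As a self-contained illustration (not part of the patch) of the rejection step and the metrics computed inside the bootstrap loop above, using synthetic log-likelihoods; the acceptance rule and the KLD formula mirror the code, only the numbers are made up.

import numpy as np

rng = np.random.default_rng(1)
log_likes = rng.normal(loc=-50.0, scale=3.0, size=10_000)   # stand-in log-likelihoods

# log BME: log of the mean prior-predictive likelihood
log_bme = np.log(np.nanmean(np.exp(log_likes, dtype=np.longdouble)))

# Rejection step: accept a prior sample if its likelihood, scaled by the maximum,
# exceeds a uniform random number (same rule as Likelihoods / max(Likelihoods) >= unif)
scaled_likes = np.exp(log_likes - log_likes.max())
accepted = scaled_likes >= rng.random(scaled_likes.size)

# Posterior expectation of the log-likelihood and the resulting KLD estimate
post_exp_loglike = log_likes[accepted].mean()
kld = post_exp_loglike - log_bme
print(f"log BME = {float(log_bme):.2f}, KLD = {float(kld):.2f}")
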
 
     # -------------------------------------------------------------------------
     def _perturb_data(self, data, output_names):
         """
-        Returns an array with n_bootstrap_itrs rows of perturbed data.
+        Returns an array with n_bootstrap_itrs rowsof perturbed data.
         The first row includes the original observation data.
         If `self.bayes_loocv` is True, a 2d-array will be returned with
         repeated rows and zero diagonal entries.
@@ -706,7 +608,7 @@ class BayesInference:
         data : pandas DataFrame
             Observation data.
         output_names : list
-            The output names.
+            List of the output names.
 
         Returns
         -------
@@ -718,11 +620,12 @@ class BayesInference:
         obs_data = data[output_names].values
         n_measurement, n_outs = obs_data.shape
         self.n_tot_measurement = obs_data[~np.isnan(obs_data)].shape[0]
+        # Number of bootstrap iterations
+        if self.bayes_loocv:
+            self.n_bootstrap_itrs = self.n_tot_measurement
 
         # Pass loocv dataset
         if self.bayes_loocv:
-            # Number of bootstrap iterations
-            self.n_bootstrap_itrs = self.n_tot_measurement
             obs = obs_data.T[~np.isnan(obs_data.T)]
             final_data = np.repeat(np.atleast_2d(obs), self.n_bootstrap_itrs,
                                    axis=0)
@@ -730,18 +633,15 @@ class BayesInference:
             return final_data
 
         else:
-            # Init return data with original data
             final_data = np.zeros(
                 (self.n_bootstrap_itrs, self.n_tot_measurement)
-            )
+                )
             final_data[0] = obs_data.T[~np.isnan(obs_data.T)]
             for itrIdx in range(1, self.n_bootstrap_itrs):
                 data = np.zeros((n_measurement, n_outs))
                 for idx in range(len(output_names)):
-                    # Perturb the data
                     std = np.nanstd(obs_data[:, idx])
                     if std == 0:
-                        print('Note: Use std=0.01 for perturbation')
                         std = 0.001
                     noise = std * noise_level
                     data[:, idx] = np.add(
@@ -753,17 +653,45 @@ class BayesInference:
 
             return final_data
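
A minimal sketch (not part of the patch) of the perturbation scheme implemented above, with a synthetic observation vector: the first row keeps the original data and every further bootstrap row adds zero-mean Gaussian noise scaled by the per-output standard deviation and the noise level.

import numpy as np

rng = np.random.default_rng(0)
obs = np.array([1.2, 0.8, 1.5, 0.9])       # one flattened model output (illustrative)
noise_level, n_bootstrap_itrs = 0.05, 4

perturbed = np.tile(obs, (n_bootstrap_itrs, 1))
noise = np.nanstd(obs) * noise_level
perturbed[1:] += rng.normal(0, noise, size=(n_bootstrap_itrs - 1, obs.size))
print(perturbed.shape)                     # (4, 4); row 0 is the unperturbed data
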
 
+    # -------------------------------------------------------------------------
+    def _logpdf(self, x, mean, cov):
+        """
+        Computes the likelihood based on a multivariate normal distribution.
+
+        Parameters
+        ----------
+        x : TYPE
+            DESCRIPTION.
+        mean : array_like
+            Observation data.
+        cov : 2d array
+            Covariance matrix of the distribution.
+
+        Returns
+        -------
+        log_lik : float
+            Log likelihood.
+
+        """
+        n = len(mean)
+        L = spla.cholesky(cov, lower=True)
+        beta = np.sum(np.log(np.diag(L)))
+        dev = x - mean
+        alpha = dev.dot(spla.cho_solve((L, True), dev))
+        log_lik = -0.5 * alpha - beta - n / 2. * np.log(2 * np.pi)
+        return log_lik
+
     # -------------------------------------------------------------------------
     def _eval_model(self, samples=None, key='MAP'):
         """
-        Evaluates Forward Model and zips the results
+        Evaluates Forward Model.
 
         Parameters
         ----------
         samples : array of shape (n_samples, n_params), optional
             Parameter sets. The default is None.
         key : str, optional
-            Descriptive key string for the run_model_parallel method.
+            Key string to be passed to the run_model_parallel method.
             The default is 'MAP'.
 
         Returns
@@ -772,17 +700,18 @@ class BayesInference:
             Model outputs.
 
         """
+        MetaModel = self.MetaModel
         Model = self.engine.Model
 
         if samples is None:
-            self.prior_samples = self.engine.ExpDesign.generate_samples(
-                self.n_prior_samples, 'random')
+            self.samples = self.engine.ExpDesign.generate_samples(
+                self.n_samples, 'random')
         else:
-            self.prior_samples = samples
-            self.n_prior_samples = len(samples)
+            self.samples = samples
+            self.n_samples = len(samples)
 
         model_outputs, _ = Model.run_model_parallel(
-            self.prior_samples, key_str=key + self.name)
+            self.samples, key_str=key+self.name)
 
         # Clean up
         # Zip the subdirectories
@@ -795,6 +724,55 @@ class BayesInference:
 
         return model_outputs
 
+    # -------------------------------------------------------------------------
+    def _kernel_rbf(self, X, hyperparameters):
+        """
+        Isotropic squared exponential kernel.
+
+        Higher l values lead to smoother functions and therefore to coarser
+        approximations of the training data. Lower l values make functions
+        more wiggly with wide uncertainty regions between training data points.
+
+        sigma_f controls the marginal variance of b(x)
+
+        Parameters
+        ----------
+        X : ndarray of shape (n_samples_X, n_features)
+
+        hyperparameters : Dict
+            Lambda characteristic length
+            sigma_f controls the marginal variance of b(x)
+            sigma_0 unresolvable error nugget term, interpreted as random
+                    error that cannot be attributed to measurement error.
+        Returns
+        -------
+        var_cov_matrix : ndarray of shape (n_samples_X,n_samples_X)
+            Kernel k(X, X).
+
+        """
+        from sklearn.gaussian_process.kernels import RBF
+        min_max_scaler = preprocessing.MinMaxScaler()
+        X_minmax = min_max_scaler.fit_transform(X)
+
+        nparams = len(hyperparameters)
+        # characteristic length (0,1]
+        Lambda = hyperparameters[0]
+        # sigma_f controls the marginal variance of b(x)
+        sigma2_f = hyperparameters[1]
+
+        # cov_matrix = sigma2_f*rbf_kernel(X_minmax, gamma = 1/Lambda**2)
+
+        rbf = RBF(length_scale=Lambda)
+        cov_matrix = sigma2_f * rbf(X_minmax)
+        if nparams > 2:
+            # (unresolvable error) nugget term that is interpreted as random
+            # error that cannot be attributed to measurement error.
+            sigma2_0 = hyperparameters[2:]
+            for i, j in np.ndindex(cov_matrix.shape):
+                cov_matrix[i, j] += np.sum(sigma2_0) if i == j else 0
+
+        return cov_matrix
+
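+    # NOTE (editor's illustrative sketch, not part of the original code): the
+    # kernel above can be exercised on its own; the hyperparameter vector is
+    # ordered as [Lambda, sigma2_f, optional nugget terms]. Min-max scaling of
+    # X is omitted here for brevity.
+    #
+    # >>> import numpy as np
+    # >>> from sklearn.gaussian_process.kernels import RBF
+    # >>> rng = np.random.default_rng(0)
+    # >>> X = rng.uniform(size=(5, 2))              # 5 samples, 2 features
+    # >>> Lambda, sigma2_f, sigma2_0 = 0.5, 1.0, 1e-3
+    # >>> K = sigma2_f * RBF(length_scale=Lambda)(X)
+    # >>> K[np.diag_indices_from(K)] += sigma2_0    # nugget on the diagonal
+    # >>> K.shape
+    # (5, 5)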
     # -------------------------------------------------------------------------
     def normpdf(self, outputs, obs_data, total_sigma2s, sigma2=None, std=None):
         """
@@ -810,7 +788,7 @@ class BayesInference:
             A dictionary/dataframe containing the observation data.
         total_sigma2s : dict
             A dictionary with known values of the covariance diagonal entries,
-            a.k.a. sigma^2.
+            a.k.a. sigma^2.
         sigma2 : array, optional
             An array of the sigma^2 samples, when the covariance diagonal
             entries are unknown and are being jointly inferred. The default is
@@ -831,11 +809,11 @@ class BayesInference:
 
         # Extract the requested model outputs for likelihood calculation
         if self.req_outputs is None:
-            req_outputs = Model.Output.names  # TODO: should this then be saved as self.req_outputs?
+            req_outputs = Model.Output.names
         else:
             req_outputs = list(self.req_outputs)
 
-        # Loop over the output keys
+        # Loop over the outputs
         for idx, out in enumerate(req_outputs):
 
             # (Meta)Model Output
@@ -847,25 +825,26 @@ class BayesInference:
             except AttributeError:
                 data = obs_data[out][~np.isnan(obs_data[out])]
 
-            # Prepare data uncertainty / error estimation (sigma2s)
+            # Prepare sigma2s
             non_nan_indices = ~np.isnan(total_sigma2s[out])
             tot_sigma2s = total_sigma2s[out][non_nan_indices][:nout]
 
-            # Add the std of the PCE if an emulator is used
+            # Add the std of the PCE if it is chosen as the emulator.
             if self.emulator:
                 if std is not None:
-                    tot_sigma2s += std[out] ** 2
+                    tot_sigma2s += std[out]**2
+
+            # Covariance Matrix
+            covMatrix = np.diag(tot_sigma2s)
 
             # Select the data points to compare
             try:
                 indices = self.selected_indices[out]
             except:
                 indices = list(range(nout))
+            covMatrix = np.diag(covMatrix[indices, indices])
 
-            # Set up Covariance Matrix
-            covMatrix = np.diag(np.diag(tot_sigma2s)[indices, indices])
-
-            # If sigma2 is not given, use given total_sigma2s and move to next itr
+            # If sigma2 is not given, use given total_sigma2s
             if sigma2 is None:
                 logLik += stats.multivariate_normal.logpdf(
                     outputs[out][:, indices], data[indices], covMatrix)
@@ -881,24 +860,26 @@ class BayesInference:
                 # Covariance Matrix
                 covMatrix = np.diag(tot_sigma2s)
 
-                # Check the type error term
-                if self.bias_inputs is not None and self.error_model is None:
-                    # Infer a Bias model usig Gaussian Process Regression
-                    bias_inputs = np.hstack(
-                        (self.bias_inputs[out],
-                         tot_outputs[s_idx].reshape(-1, 1)))
-
-                    params = sigma2[s_idx, idx * 3:(idx + 1) * 3]
-                    covMatrix = _kernel_rbf(bias_inputs, params)
-                else:
-                    # Infer equal sigma2s
-                    try:
-                        sigma_2 = sigma2[s_idx, idx]
-                    except TypeError:
-                        sigma_2 = 0.0
+                if sigma2 is not None:
+                    # Check the type of the error term
+                    if hasattr(self, 'bias_inputs') and \
+                       not hasattr(self, 'error_model'):
+                        # Infer a Bias model using Gaussian Process Regression
+                        bias_inputs = np.hstack(
+                            (self.bias_inputs[out],
+                             tot_outputs[s_idx].reshape(-1, 1)))
+
+                        params = sigma2[s_idx, idx*3:(idx+1)*3]
+                        covMatrix = self._kernel_rbf(bias_inputs, params)
+                    else:
+                        # Infer equal sigma2s
+                        try:
+                            sigma_2 = sigma2[s_idx, idx]
+                        except TypeError:
+                            sigma_2 = 0.0
 
-                    covMatrix += sigma_2 * np.eye(nout)
-                    # covMatrix = np.diag(sigma2 * total_sigma2s)
+                        covMatrix += sigma_2 * np.eye(nout)
+                        # covMatrix = np.diag(sigma2 * total_sigma2s)
 
                 # Select the data points to compare
                 try:
@@ -908,45 +889,86 @@ class BayesInference:
                 covMatrix = np.diag(covMatrix[indices, indices])
 
                 # Compute loglikelihood
-                logliks[s_idx] = _logpdf(
+                logliks[s_idx] = self._logpdf(
                     tot_outputs[s_idx, indices], data[indices], covMatrix
-                )
+                    )
+
             logLik += logliks
         return logLik
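+
+    # NOTE (editor's illustrative sketch, not part of the original code): for a
+    # single output key the covariance assembled above is a diagonal matrix of
+    # the known variances (plus the squared surrogate std when an emulator is
+    # used), restricted to the selected indices. All values below are made up.
+    #
+    # >>> import numpy as np
+    # >>> from scipy import stats
+    # >>> tot_sigma2s = np.array([0.04, 0.04, 0.09, 0.09])  # known variances
+    # >>> std_pce = np.array([0.01, 0.02, 0.01, 0.02])      # surrogate std
+    # >>> covMatrix = np.diag(tot_sigma2s + std_pce**2)
+    # >>> indices = [0, 2]                                  # selected points
+    # >>> covMatrix = np.diag(covMatrix[indices, indices])
+    # >>> data = np.array([1.0, 1.0, 2.0, 2.0])
+    # >>> outputs = np.array([[1.1, 1.0, 2.1, 1.9]])        # one model run
+    # >>> logLik = stats.multivariate_normal.logpdf(
+    # ...     outputs[:, indices], data[indices], covMatrix)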
 
     # -------------------------------------------------------------------------
-    def _corr_factor_BME(self, obs_data, total_sigma2s, logBME):
+    def _corr_factor_BME_old(self, Data, total_sigma2s, posterior):
         """
         Calculates the correction factor for BMEs.
-        
-        Parameters
-        ----------
-        obs_data : dict
-            A dictionary/dataframe containing the observation data.
-        total_sigma2s : dict
-            A dictionary with known values of the covariance diagonal entries,
-            a.k.a sigma^2.
-        logBME : ??
-            The log_BME obtained from the estimated likelihoods
+        """
+        MetaModel = self.MetaModel
+        OrigModelOutput = self.engine.ExpDesign.Y
+        Model = self.engine.Model
 
-        Returns
-        -------
-        np.log(weights) : ??
-            Correction factors # TODO: factors or log of factors?
+        # Posterior with Gaussian likelihood
+        postDist = stats.gaussian_kde(posterior.T)
+
+        # Remove NaN
+        Data = Data[~np.isnan(Data)]
+        total_sigma2s = total_sigma2s[~np.isnan(total_sigma2s)]
+
+        # Covariance Matrix
+        covMatrix = np.diag(total_sigma2s[:self.n_tot_measurement])
 
-        """
         # Extract the requested model outputs for likelihood calculation
-        MetaModel = self.engine.MetaModel
+        if self.req_outputs is None:
+            OutputType = Model.Output.names
+        else:
+            OutputType = list(self.req_outputs)
+
+        # SampleSize = OrigModelOutput[OutputType[0]].shape[0]
+
+        # Flatten the OutputType for OrigModel
+        TotalOutputs = np.concatenate([OrigModelOutput[x] for x in OutputType], 1)
+
+        NrofBayesSamples = self.n_samples
+        # Evaluate MetaModel on the experimental design
+        Samples = self.engine.ExpDesign.X
+        OutputRS, stdOutputRS = MetaModel.eval_metamodel(samples=Samples)
+
+        # Reset the NrofSamples to NrofBayesSamples
+        self.n_samples = NrofBayesSamples
+
+        # Flatten the OutputType for MetaModel
+        TotalPCEOutputs = np.concatenate([OutputRS[x] for x in OutputRS], 1)
+        TotalPCEstdOutputRS= np.concatenate([stdOutputRS[x] for x in stdOutputRS], 1)
+
+        logweight = 0
+        for i, sample in enumerate(Samples):
+            # Compute likelihood of output vs RS
+            covMatrix = np.diag(TotalPCEstdOutputRS[i]**2)
+            logLik = self._logpdf(TotalOutputs[i], TotalPCEOutputs[i], covMatrix)
+            # Compute posterior likelihood of the collocation points
+            logpostLik = np.log(postDist.pdf(sample[:, None]))[0]
+            if logpostLik != -np.inf:
+                logweight += logLik + logpostLik
+        return logweight
+
+    # -------------------------------------------------------------------------
+    def __corr_factor_BME(self, obs_data, total_sigma2s, logBME):
+        """
+        Calculates the correction factor for BMEs.
+        """
+        MetaModel = self.MetaModel
         samples = self.engine.ExpDesign.X
         model_outputs = self.engine.ExpDesign.Y
+        Model = self.engine.Model
         n_samples = samples.shape[0]
-        output_names = self.engine.Model.Output.names
+
+        # Extract the requested model outputs for likelihood calculation
+        output_names = Model.Output.names
 
         # Evaluate MetaModel on the experimental design and ValidSet
         OutputRS, stdOutputRS = MetaModel.eval_metamodel(samples=samples)
 
-        logLik_data = np.zeros(n_samples)
-        logLik_model = np.zeros(n_samples)
+        logLik_data = np.zeros((n_samples))
+        logLik_model = np.zeros((n_samples))
         # Loop over the outputs
         for idx, out in enumerate(output_names):
 
@@ -975,7 +997,7 @@ class BayesInference:
                 y_m_hat = OutputRS[out][i]
 
                 # CovMatrix with the surrogate error
-                covMatrix = np.eye(len(y_m)) * 1 / (2 * np.pi)
+                covMatrix = np.eye(len(y_m)) * 1/(2*np.pi)
 
                 # Select the data points to compare
                 try:
@@ -986,20 +1008,20 @@ class BayesInference:
                 covMatrix_data = np.diag(covMatrix_data[indices, indices])
 
                 # Compute likelihood of output vs data
-                logLik_data[i] += _logpdf(
+                logLik_data[i] += self._logpdf(
                     y_m_hat[indices], data[indices],
                     covMatrix_data
-                )
+                    )
 
                 # Compute likelihood of output vs surrogate
-                logLik_model[i] += _logpdf(
+                logLik_model[i] += self._logpdf(
                     y_m_hat[indices], y_m[indices],
                     covMatrix
-                )
+                    )
 
         # Weight
         logLik_data -= logBME
-        weights = np.mean(np.exp(logLik_model + logLik_data))
+        weights = np.mean(np.exp(logLik_model+logLik_data))
 
         return np.log(weights)
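+
+    # NOTE (editor's illustrative sketch, not part of the original code): the
+    # correction factor combines, per training point, the surrogate-vs-model
+    # and the BME-normalised surrogate-vs-data log-likelihoods.
+    #
+    # >>> import numpy as np
+    # >>> logLik_model = np.array([-1.2, -0.8, -1.0])  # surrogate vs model
+    # >>> logLik_data = np.array([-3.5, -2.9, -3.1])   # surrogate vs data
+    # >>> logBME = -3.0
+    # >>> weights = np.mean(np.exp(logLik_model + (logLik_data - logBME)))
+    # >>> corr_factor = np.log(weights)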
 
@@ -1015,43 +1037,45 @@ class BayesInference:
             Posterior samples of the input parameters.
 
         """
-        if self.prior_samples is None:
-            raise AttributeError('No prior samples available!')
 
-        if self.log_likes is None:
-            raise AttributeError('No log-likelihoods available!')
-
-        # Get sigmas # TODO: is this data uncertainty?
+        MetaModel = self.MetaModel
         try:
             sigma2_prior = self.Discrepancy.sigma2_prior
         except:
             sigma2_prior = None
 
-        # Combine samples and sigma2 for the return
-        samples = self.prior_samples
+        # Check if the discrepancy is defined as a distribution:
+        samples = self.samples
+
         if sigma2_prior is not None:
             samples = np.hstack((samples, sigma2_prior))
 
         # Take the first column of Likelihoods (Observation data without noise)
         if self.just_analysis or self.bayes_loocv:
-            index = self.n_tot_measurement - 1
+            index = self.n_tot_measurement-1
+            likelihoods = np.exp(self.log_likes[:, index], dtype=np.longdouble)  # or np.float128
         else:
-            index = 0
-
-        # Use longdouble on windows, float128 on linux
-        likelihoods = np.exp(self.log_likes[:, index], dtype=self.dtype)
+            likelihoods = np.exp(self.log_likes[:, 0], dtype=np.longdouble)  # or np.float128
 
         n_samples = len(likelihoods)
-        norm_likelihoods = likelihoods / np.max(likelihoods)
+        norm_likelihoods = likelihoods / np.max(likelihoods)
 
         # Normalize based on min if all Likelihoods are zero
         if all(likelihoods == 0.0):
             likelihoods = self.log_likes[:, 0]
-            norm_likelihoods = likelihoods / np.min(likelihoods)
+            norm_likelihoods = likelihoods / np.min(likelihoods)
 
-        # Reject the poorly performed prior compared to a uniform distribution
+        # Random numbers between 0 and 1
         unif = np.random.rand(1, n_samples)[0]
-        accepted_samples = samples[norm_likelihoods >= unif]
+
+        # Reject prior samples whose normalized likelihood falls below the
+        # uniform draw (rejection sampling)
+        accepted_samples = samples[norm_likelihoods >= unif]
+
+        # Output the Posterior
+        par_names = self.engine.ExpDesign.par_names
+        if sigma2_prior is not None:
+            for name in self.Discrepancy.name:
+                par_names.append(name)
 
         return pd.DataFrame(accepted_samples, columns=sigma2_prior)
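+
+    # NOTE (editor's illustrative sketch, not part of the original code): the
+    # rejection step above keeps a prior sample whenever its normalized
+    # likelihood exceeds a uniform random draw.
+    #
+    # >>> import numpy as np
+    # >>> rng = np.random.default_rng(0)
+    # >>> samples = rng.normal(size=(1000, 2))           # prior samples
+    # >>> log_likes = -0.5 * np.sum(samples**2, axis=1)  # toy log-likelihood
+    # >>> likelihoods = np.exp(log_likes)
+    # >>> norm_likelihoods = likelihoods / np.max(likelihoods)
+    # >>> unif = rng.random(len(likelihoods))
+    # >>> accepted = samples[norm_likelihoods >= unif]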
 
@@ -1073,21 +1097,25 @@ class BayesInference:
 
         """
 
-        MetaModel = self.engine.MetaModel
+        MetaModel = self.MetaModel
         Model = self.engine.Model
 
-        # Read observation data and perturb it if requested # TODO: where is the perturbation?
+        # Make a directory to save the prior/posterior predictive
+        out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
+        os.makedirs(out_dir, exist_ok=True)
+
+        # Read observation data and perturb it if requested
         if self.measured_data is None:
             self.measured_data = Model.read_observation(case=self.name)
 
         if not isinstance(self.measured_data, pd.DataFrame):
             self.measured_data = pd.DataFrame(self.measured_data)
 
-        # X_values and prior sigma2
+        # X_values
         x_values = self.engine.ExpDesign.x_values
+
         try:
-            sigma2_prior = self.Discrepancy.sigma2_prior  # TODO: what is this? Looks to be built for a different
-            # Discrepancy structure
+            sigma2_prior = self.Discrepancy.sigma2_prior
         except:
             sigma2_prior = None
 
@@ -1095,47 +1123,45 @@ class BayesInference:
         posterior_df = self.posterior_df
 
         # Take care of the sigma2
-        sigma2s = None
-        if sigma2_prior is not None:  # TODO: why is this the if for this code?
+        if sigma2_prior is not None:
             try:
-                sigma2s = posterior_df[self.Discrepancy.name].values  # TODO: what is Discrepancy.name?
+                sigma2s = posterior_df[self.Discrepancy.name].values
                 posterior_df = posterior_df.drop(
                     labels=self.Discrepancy.name, axis=1
-                )
+                    )
             except:
                 sigma2s = self.sigma2s
 
         # Posterior predictive
         if self.emulator:
-            if self.inference_method.lower() == 'rejection':  # TODO: combine these two? Why is there no
-                # post_pred_std for rejection sampling?
-                prior_pred = self._mean_pce_prior_pred
-            if self.name.lower() == 'valid':
-                post_pred = self._mean_pce_prior_pred
+            if self.inference_method == 'rejection':
+                prior_pred = self.__mean_pce_prior_pred
+            if self.name.lower() != 'calib':
+                post_pred = self.__mean_pce_prior_pred
                 post_pred_std = self._std_pce_prior_pred
             else:
-                post_pred, post_pred_std = MetaModel.eval_metamodel(  # TODO: recheck if this is needed
+                post_pred, post_pred_std = MetaModel.eval_metamodel(
                     samples=posterior_df.values
-                )
+                    )
 
-        else:  # TODO: see emulator version
-            if self.inference_method.lower() == 'rejection':
+        else:
+            if self.inference_method == 'rejection':
                 prior_pred = self.__model_prior_pred
-            if self.name.lower() == 'valid':
+            if self.name.lower() != 'calib':
                 post_pred = self.__mean_pce_prior_pred
                 post_pred_std = self._std_pce_prior_pred
             else:
                 post_pred = self._eval_model(
                     samples=posterior_df.values, key='PostPred'
-                )
+                    )
         # Correct the predictions with Model discrepancy
-        if self.error_model:
+        if hasattr(self, 'error_model') and self.error_model:
             y_hat, y_std = self.error_MetaModel.eval_model_error(
                 self.bias_inputs, post_pred
-            )
+                )
             post_pred, post_pred_std = y_hat, y_std
 
-        # Add discrepancy from likelihood samples to the current posterior runs
+        # Add discrepancy from likelihood samples to the current posterior runs
         total_sigma2 = self.Discrepancy.total_sigma2
         post_pred_withnoise = copy.deepcopy(post_pred)
         for varIdx, var in enumerate(Model.Output.names):
@@ -1147,15 +1173,16 @@ class BayesInference:
                 tot_sigma2 = clean_sigma2[:len(pred)]
                 cov = np.diag(tot_sigma2)
 
-                # Account for additional error terms
+                # Check the type of the error term
                 if sigma2_prior is not None:
                     # Inferred sigma2s
-                    if self.bias_inputs is not None and self.error_model is None:
+                    if hasattr(self, 'bias_inputs') and \
+                       not hasattr(self, 'error_model'):
                         # TODO: Infer a Bias model using GPR
                         bias_inputs = np.hstack((
                             self.bias_inputs[var], pred.reshape(-1, 1)))
-                        params = sigma2s[i, varIdx * 3:(varIdx + 1) * 3]
-                        cov = _kernel_rbf(bias_inputs, params)
+                        params = sigma2s[i, varIdx*3:(varIdx+1)*3]
+                        cov = self._kernel_rbf(bias_inputs, params)
                     else:
                         # Infer equal sigma2s
                         try:
@@ -1166,25 +1193,25 @@ class BayesInference:
                         # Convert biasSigma2s to a covMatrix
                         cov += sigma2 * np.eye(len(pred))
 
-                # Add predictive metamodel error/uncertainty
                 if self.emulator:
-                    if MetaModel.rmse is not None:
+                    if hasattr(MetaModel, 'rmse') and \
+                       MetaModel.rmse is not None:
                         stdPCE = MetaModel.rmse[var]
                     else:
                         stdPCE = post_pred_std[var][i]
                     # Expected value of variance (Assump: i.i.d stds)
-                    cov += np.diag(stdPCE ** 2)
+                    cov += np.diag(stdPCE**2)
 
                 # Sample a multivariate normal distribution with mean of
-                # posterior prediction and variance of cov
+                # prediction and variance of cov
                 post_pred_withnoise[var][i] = np.random.multivariate_normal(
                     pred, cov, 1
-                )
+                    )
 
         # ----- Prior Predictive -----
         if self.inference_method.lower() == 'rejection':
             # Create hdf5 metadata
-            hdf5file = f'{self.out_dir}/priorPredictive.hdf5'
+            hdf5file = f'{out_dir}/priorPredictive.hdf5'
             hdf5_exist = os.path.exists(hdf5file)
             if hdf5_exist:
                 os.remove(hdf5file)
@@ -1205,7 +1232,7 @@ class BayesInference:
 
         # ----- Posterior Predictive only model evaluations -----
         # Create hdf5 metadata
-        hdf5file = self.out_dir + '/postPredictive_wo_noise.hdf5'
+        hdf5file = out_dir+'/postPredictive_wo_noise.hdf5'
         hdf5_exist = os.path.exists(hdf5file)
         if hdf5_exist:
             os.remove(hdf5file)
@@ -1226,7 +1253,7 @@ class BayesInference:
 
         # ----- Posterior Predictive with noise -----
         # Create hdf5 metadata
-        hdf5file = self.out_dir + '/postPredictive.hdf5'
+        hdf5file = out_dir+'/postPredictive.hdf5'
         hdf5_exist = os.path.exists(hdf5file)
         if hdf5_exist:
             os.remove(hdf5file)
@@ -1259,9 +1286,11 @@ class BayesInference:
 
         """
 
-        MetaModel = self.engine.MetaModel
+        MetaModel = self.MetaModel
         Model = self.engine.Model
+        out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
         opt_sigma = self.Discrepancy.opt_sigma
+
         # -------- Find MAP and run MetaModel and origModel --------
         # Compute the MAP
         if self.max_a_posteriori.lower() == 'mean':
@@ -1272,8 +1301,6 @@ class BayesInference:
             map_theta = Posterior_df.mean(axis=0).reshape(
                 (1, MetaModel.n_params))
         else:
-            # TODO: here just a fix, no previous mention of Posterior_df!
-            Posterior_df = None
             map_theta = stats.mode(Posterior_df.values, axis=0)[0]
         # Print report
         print("\nPoint estimator:\n", map_theta[0])
@@ -1297,9 +1324,10 @@ class BayesInference:
         Marker = 'x'
 
         # Create a PdfPages object
-        pdf = PdfPages(f'./{self.out_dir}MAP_PCE_vs_Model_{self.name}.pdf')
+        pdf = PdfPages(f'./{out_dir}MAP_PCE_vs_Model_{self.name}.pdf')
         fig = plt.figure()
         for i, key in enumerate(Model.Output.names):
+
             y_val = map_orig_model[key]
             y_pce_val = map_metamodel_mean[key]
             y_pce_val_std = map_metamodel_std[key]
@@ -1310,13 +1338,13 @@ class BayesInference:
             plt.plot(
                 x_values, y_pce_val[i], color=Color[i], lw=2.0,
                 marker=Marker, linestyle='--', label='$Y_{MAP}^{PCE}$'
-            )
+                )
             # plot the confidence interval
             plt.fill_between(
-                x_values, y_pce_val[i] - 1.96 * y_pce_val_std[i],
-                y_pce_val[i] + 1.96 * y_pce_val_std[i],
+                x_values, y_pce_val[i] - 1.96*y_pce_val_std[i],
+                y_pce_val[i] + 1.96*y_pce_val_std[i],
                 color=Color[i], alpha=0.15
-            )
+                )
 
             # Calculate the adjusted R_squared and RMSE
             R2 = r2_score(y_pce_val.reshape(-1, 1), y_val.reshape(-1, 1))
@@ -1331,7 +1359,7 @@ class BayesInference:
             fig.canvas.draw()
             p = leg.get_window_extent().inverse_transformed(ax.transAxes)
             ax.text(
-                p.p0[1] - 0.05, p.p1[1] - 0.25,
+                p.p0[1]-0.05, p.p1[1]-0.25,
                 f'RMSE = {rmse:.3f}\n$R^2$ = {R2:.3f}',
                 transform=ax.transAxes, color='black',
                 bbox=dict(facecolor='none', edgecolor='black',
@@ -1347,110 +1375,6 @@ class BayesInference:
 
         pdf.close()
 
-    def plot_post_params(self, opt_sigma):
-        """
-        Plots the multivar. posterior parameter distribution.
-        
-
-        Parameters
-        ----------
-        opt_sigma : string
-            Type of uncertainty description available.
-
-        Returns
-        -------
-        None.
-
-        """
-        par_names = self.engine.ExpDesign.par_names
-        if opt_sigma != "B":
-            par_names.extend(
-                [self.Discrepancy.InputDisc.Marginals[i].name for i
-                 in range(len(self.Discrepancy.InputDisc.Marginals))]
-            )
-        # Pot with corner
-        figPosterior = corner.corner(self.posterior_df.to_numpy(),
-                                     labels=par_names,
-                                     quantiles=[0.15, 0.5, 0.85],
-                                     show_titles=True,
-                                     title_fmt=self.corner_title_fmt,
-                                     labelpad=0.2,
-                                     use_math_text=True,
-                                     title_kwargs={"fontsize": 28},
-                                     plot_datapoints=False,
-                                     plot_density=False,
-                                     fill_contours=True,
-                                     smooth=0.5,
-                                     smooth1d=0.5)
-
-        # Loop over axes and set x limits
-        if opt_sigma == "B":
-            axes = np.array(figPosterior.axes).reshape(
-                (len(par_names), len(par_names))
-            )
-            for yi in range(len(par_names)):
-                ax = axes[yi, yi]
-                ax.set_xlim(self.engine.ExpDesign.bound_tuples[yi])
-                for xi in range(yi):
-                    ax = axes[yi, xi]
-                    ax.set_xlim(self.engine.ExpDesign.bound_tuples[xi])
-        plt.close()
-
-        # Turn off gridlines
-        for ax in figPosterior.axes:
-            ax.grid(False)
-
-        if self.emulator:
-            plotname = f'/Posterior_Dist_{self.engine.Model.name}_emulator'
-        else:
-            plotname = f'/Posterior_Dist_{self.engine.Model.name}'
-
-        figPosterior.set_size_inches((24, 16))
-        figPosterior.savefig(f'./{self.out_dir}{plotname}.pdf',
-                             bbox_inches='tight')
-
-        plt.clf()
-
-    def plot_log_BME(self):
-        """
-        Plots the log_BME if bootstrap is active.
-
-        Returns
-        -------
-        None.
-
-        """
-
-        # Computing the TOM performance
-        self.log_BME_tom = stats.chi2.rvs(
-            self.n_tot_measurement, size=self.log_BME.shape[0]
-        )
-
-        fig, ax = plt.subplots()
-        sns.kdeplot(self.log_BME_tom, ax=ax, color="green", shade=True)
-        sns.kdeplot(
-            self.log_BME, ax=ax, color="blue", shade=True,
-            label='Model BME')
-
-        ax.set_xlabel('log$_{10}$(BME)')
-        ax.set_ylabel('Probability density')
-
-        legend_elements = [
-            Patch(facecolor='green', edgecolor='green', label='TOM BME'),
-            Patch(facecolor='blue', edgecolor='blue', label='Model BME')
-        ]
-        ax.legend(handles=legend_elements)
-
-        if self.emulator:
-            plotname = f'/BME_hist_{self.engine.Model.name}_emulator'
-        else:
-            plotname = f'/BME_hist_{self.engine.Model.name}'
-
-        plt.savefig(f'./{self.out_dir}{plotname}.pdf', bbox_inches='tight')
-
-        plt.show()
-        plt.clf()
-
     # -------------------------------------------------------------------------
     def _plot_post_predictive(self):
         """
@@ -1463,6 +1387,7 @@ class BayesInference:
         """
 
         Model = self.engine.Model
+        out_dir = f'Outputs_Bayes_{Model.name}_{self.name}'
         # Plot the posterior predictive
         for out_idx, out_name in enumerate(Model.Output.names):
             fig, ax = plt.subplots()
@@ -1471,10 +1396,11 @@ class BayesInference:
 
                 # --- Read prior and posterior predictive ---
                 if self.inference_method == 'rejection' and \
-                        self.name.lower() == 'calib':
+                   self.name.lower() != 'valid':
                     #  --- Prior ---
                     # Load posterior predictive
-                    f = h5py.File(f'{self.out_dir}/priorPredictive.hdf5', 'r+')
+                    f = h5py.File(
+                        f'{out_dir}/priorPredictive.hdf5', 'r+')
 
                     try:
                         x_coords = np.array(f[f"x_values/{out_name}"])
@@ -1483,8 +1409,10 @@ class BayesInference:
 
                     X_values = np.repeat(x_coords, 10000)
 
-                    prior_pred_df = {x_key: X_values, out_name: np.array(
-                        f[f"EDY/{out_name}"])[:10000].flatten('F')}
+                    prior_pred_df = {}
+                    prior_pred_df[x_key] = X_values
+                    prior_pred_df[out_name] = np.array(
+                        f[f"EDY/{out_name}"])[:10000].flatten('F')
                     prior_pred_df = pd.DataFrame(prior_pred_df)
 
                     tags_post = ['prior'] * len(prior_pred_df)
@@ -1494,13 +1422,16 @@ class BayesInference:
                     f.close()
 
                     # --- Posterior ---
-                    f = h5py.File(f"{self.out_dir}/postPredictive.hdf5", 'r+')
+                    f = h5py.File(f"{out_dir}/postPredictive.hdf5", 'r+')
 
                     X_values = np.repeat(
                         x_coords, np.array(f[f"EDY/{out_name}"]).shape[0])
 
-                    post_pred_df = {x_key: X_values, out_name: np.array(
-                        f[f"EDY/{out_name}"]).flatten('F')}
+                    post_pred_df = {}
+                    post_pred_df[x_key] = X_values
+                    post_pred_df[out_name] = np.array(
+                        f[f"EDY/{out_name}"]).flatten('F')
+
                     post_pred_df = pd.DataFrame(post_pred_df)
 
                     tags_post = ['posterior'] * len(post_pred_df)
@@ -1528,7 +1459,7 @@ class BayesInference:
 
                     ax.errorbar(
                         x_coords, obs_data[out_name].values,
-                        yerr=1.96 * self.measurement_error[out_name],
+                        yerr=1.96*self.measurement_error[out_name],
                         ecolor='g', fmt=' ', zorder=-1)
 
                     # Add labels to the legend
@@ -1546,7 +1477,7 @@ class BayesInference:
 
                 else:
                     # Load posterior predictive
-                    f = h5py.File(f"{self.out_dir}/postPredictive.hdf5", 'r+')
+                    f = h5py.File(f"{out_dir}/postPredictive.hdf5", 'r+')
 
                     try:
                         x_coords = np.array(f[f"x_values/{out_name}"])
@@ -1561,7 +1492,7 @@ class BayesInference:
                         x_coords, mu, marker='o', color='b',
                         label='Mean Post. Predictive')
                     plt.fill_between(
-                        x_coords, mu - 1.96 * std, mu + 1.96 * std, color='b',
+                        x_coords, mu-1.96*std, mu+1.96*std, color='b',
                         alpha=0.15)
 
                     # --- Plot Data ---
@@ -1574,7 +1505,7 @@ class BayesInference:
                     for output in orig_ED_Y:
                         plt.plot(
                             x_coords, output, color='grey', alpha=0.15
-                        )
+                            )
 
                     # Add labels for axes
                     plt.xlabel('Time [s]')
@@ -1597,7 +1528,5 @@ class BayesInference:
                 else:
                     plotname = f'/Post_Prior_Perd_{Model.name}'
 
-                fig.savefig(f'./{self.out_dir}{plotname}_{out_name}.pdf',
+                fig.savefig(f'./{out_dir}{plotname}_{out_name}.pdf',
                             bbox_inches='tight')
-
-        plt.clf()
diff --git a/src/bayesvalidrox/bayes_inference/bayes_model_comparison.py b/src/bayesvalidrox/bayes_inference/bayes_model_comparison.py
index a26eaa886..828613556 100644
--- a/src/bayesvalidrox/bayes_inference/bayes_model_comparison.py
+++ b/src/bayesvalidrox/bayes_inference/bayes_model_comparison.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-import emcee
 import numpy as np
 import os
 from scipy import stats
@@ -9,7 +8,6 @@ import seaborn as sns
 import matplotlib.patches as patches
 import matplotlib.colors as mcolors
 import matplotlib.pylab as plt
-import pandas as pd
 from .bayes_inference import BayesInference
 
 # Load the mplstyle
@@ -29,89 +27,31 @@ class BayesModelComparison:
         `True`.
     perturbed_data : array of shape (n_bootstrap_itrs, n_obs), optional
         User defined perturbed data. The default is `None`.
-    n_bootstrap : int
+    n_bootstarp : int
         Number of bootstrap iterations. The default is `1000`.
     data_noise_level : float
         A noise level to perturb the data set. The default is `0.01`.
+    just_n_meas : int
+        Number of measurements considered for visualization of the
+        justifiability results.
 
     """
 
     def __init__(self, justifiability=True, perturbed_data=None,
-                 n_bootstrap=1000, data_noise_level=0.01,
-                 use_Bayes_settings = True, emulator = True, out_dir = 'Outputs_Comparison/'):
+                 n_bootstarp=1000, data_noise_level=0.01, just_n_meas=2):
 
-        # TODO: check valid ranges of the parameters
-        
         self.justifiability = justifiability
         self.perturbed_data = perturbed_data
-        self.n_bootstrap = n_bootstrap
+        self.n_bootstarp = n_bootstarp
         self.data_noise_level = data_noise_level
-        self.use_Bayes_settings = use_Bayes_settings
-        self.emulator = emulator
-        self.out_dir = out_dir
-        
-        # Other parameters
-        self.n_meas = None
-        self.BF_data = None
-        self.just_data = None
-        self.BME_dict = None
-        self.set_up = False
-        self.dtype = None
-        self.bayes_dict = None
-        self.just_bayes_dict = None
-        self.model_weights = None
-        self.model_weights_dict = None
-        self.just_model_weights_dict = None
-        
-        
-    # --------------------------------------------------------------------------
-    def setup(self, model_dict):
-        """
-        Initialize parameters that are needed for all types of model comparison
-
-        Returns
-        -------
-        None.
-
-        """
-        
-        if not isinstance(model_dict, dict):
-            raise Exception("To run model comparsion, you need to pass a "
-                            "dictionary of models.")
-
-        # Extract model names
-        self.model_names = [*model_dict]
-
-        # Compute total number of the measurement points
-        # TODO: there could be a different option for this here
-        Engine = list(model_dict.items())[0][1]
-        Engine.Model.read_observation()
-        self.n_meas = Engine.Model.n_obs
-
-        # Find n_bootstrap
-        if self.perturbed_data is not None:
-            self.n_bootstrap = self.perturbed_data.shape[0]
-            
-        # Output directory
-        os.makedirs(self.out_dir, exist_ok=True)
-
-        # System settings
-        if os.name == 'nt':
-            print('')
-            print('WARNING: Performing the inference on windows can lead to reduced accuracy!')
-            print('')
-            self.dtype=np.longdouble
-        else:
-            self.dtype=np.float128
-
+        self.just_n_meas = just_n_meas
 
     # --------------------------------------------------------------------------
-    def model_comparison_all(self, model_dict, opts_dict):
+    def create_model_comparison(self, model_dict, opts_dict):
         """
-        Perform all three types of model comparison: 
-            * Bayes Factors
-            * Model weights
-            * Justifiability analysis
+        Starts the two-stage model comparison.
+        Stage I: Compare models using Bayes factors.
+        Stage II: Compare models via justifiability analysis.
 
         Parameters
         ----------
@@ -120,27 +60,53 @@ class BayesModelComparison:
         opts_dict : dict
             A dictionary given the `BayesInference` options.
 
+            Example:
+
+                >>> opts_bootstrap = {
+                    "bootstrap": True,
+                    "n_samples": 10000,
+                    "Discrepancy": DiscrepancyOpts,
+                    "emulator": True,
+                    "plot_post_pred": True
+                    }
+
         Returns
         -------
-        results : dict
-            A dictionary that contains the calculated BME values, model weights
-            and confusion matrix
+        output : dict
+            A dictionary containing the objects and the model weights for the
+            comparison using Bayes factors and justifiability analysis.
 
         """
-        self.calc_bayes_factors(model_dict, opts_dict)
-        self.calc_model_weights(model_dict, opts_dict)
-        self.calc_justifiability_analysis(model_dict, opts_dict)
-        
-        results = {'BME': self.BME_dict, 'Model weights': self.model_weights_dict,
-                   'Confusion matrix': self.confusion_matrix}
-        return results
-    
+
+        # Bayes factor
+        bayes_dict_bf, model_weights_dict_bf = self.compare_models(
+            model_dict, opts_dict
+            )
+
+        output = {
+            'Bayes objects BF': bayes_dict_bf,
+            'Model weights BF': model_weights_dict_bf
+            }
+
+        # Justifiability analysis
+        if self.justifiability:
+            bayes_dict_ja, model_weights_dict_ja = self.compare_models(
+                model_dict, opts_dict, justifiability=True
+                )
+
+            output['Bayes objects JA'] = bayes_dict_ja
+            output['Model weights JA'] = model_weights_dict_ja
+
+        return output
 
     # --------------------------------------------------------------------------
-    def calc_bayes_factors(self, model_dict, opts_dict):
+    def compare_models(self, model_dict, opts_dict, justifiability=False):
         """
-        Calculate the BayesFactors for each pair of models in the model_dict
-        with respect to given data.
+        Instantiates the BayesInference class for each model and passes the
+        options from `opts_dict` to it. Then, it starts the computations.
+        It also creates a folder and saves the diagrams, e.g., the Bayes
+        factor plot, the confusion matrix, etc.
 
         Parameters
         ----------
@@ -148,28 +114,50 @@ class BayesModelComparison:
             A dictionary including the metamodels.
         opts_dict : dict
             A dictionary given the `BayesInference` options.
+        justifiability : bool, optional
+            Whether to perform the justifiability analysis. The default is
+            `False`.
 
         Returns
         -------
-        None.
+        bayes_dict : dict
+            A dictionary with `BayesInference` objects.
+        model_weights_dict : dict
+            A dictionary containing the model weights.
 
         """
-        # Do the setup
-        if self.n_meas is None:
-            self.setup(model_dict)
-        
+
+        if not isinstance(model_dict, dict):
+            raise Exception("To run model comparsion, you need to pass a "
+                            "dictionary of models.")
+
+        # Extract model names
+        self.model_names = [*model_dict]
+
+        # Compute total number of the measurement points
+        Engine = list(model_dict.items())[0][1]
+        Engine.Model.read_observation()
+        self.n_meas = Engine.Model.n_obs
+
         # ----- Generate data -----
+        # Find n_bootstrap
+        if self.perturbed_data is None:
+            n_bootstarp = self.n_bootstarp
+        else:
+            n_bootstarp = self.perturbed_data.shape[0]
+
         # Create dataset
-        self.BF_data = self.generate_dataset(
-            model_dict, False, n_bootstrap=self.n_bootstrap)
+        justData = self.generate_dataset(
+            model_dict, justifiability, n_bootstarp=n_bootstarp)
 
         # Run create Interface for each model
-        self.bayes_dict = {}
+        bayes_dict = {}
         for model in model_dict.keys():
             print("-"*20)
             print("Bayesian inference of {}.\n".format(model))
+
             BayesOpts = BayesInference(model_dict[model])
-                
+
             # Set BayesInference options
             for key, value in opts_dict.items():
                 if key in BayesOpts.__dict__.keys():
@@ -179,147 +167,49 @@ class BayesModelComparison:
                         setattr(BayesOpts, key, value)
 
             # Pass justifiability data as perturbed data
-            BayesOpts.bmc = True
-            BayesOpts.emulator= self.emulator
-            BayesOpts.just_analysis = False
-            BayesOpts.perturbed_data = self.BF_data
+            BayesOpts.perturbed_data = justData
+            BayesOpts.just_analysis = justifiability
 
-            self.bayes_dict[model] = BayesOpts.create_inference()
+            bayes_dict[model] = BayesOpts.create_inference()
             print("-"*20)
 
-        # Accumulate the BMEs
-        self.BME_dict = dict()
-        for modelName, bayesObj in self.bayes_dict.items():
-            self.BME_dict[modelName] = np.exp(bayesObj.log_BME, dtype=self.dtype)
-
-        # TODO: move the calculation of the Bayes Factors out of the plots to here!
-        # Create kde plot for bayes factors
-        self.plot_bayes_factor(self.BME_dict, 'kde_plot')
-        
-        
-    def calc_model_weights(self, model_dict, opts_dict):
-        """
-        Calculate the model weights from BME evaluations for Bayes factors.
-
-        Parameters
-        ----------
-        model_dict : TYPE
-            DESCRIPTION.
-        opts_dict : TYPE
-            DESCRIPTION.
-
-        Returns
-        -------
-        None.
+        # Compute model weights
+        BME_Dict = dict()
+        for modelName, bayesObj in bayes_dict.items():
+            BME_Dict[modelName] = np.exp(bayesObj.log_BME, dtype=np.longdouble)  # or np.float128
 
-        """
-        # Get BMEs via Bayes Factors if not already done so
-        if self.BME_dict is None:
-            self.calc_bayes_factors(model_dict, opts_dict)
-        
-        # Calculate the model weights
-        self.model_weights = self.cal_model_weight(
-            self.BME_dict, False, n_bootstrap=self.n_bootstrap)
+        # BME correction in BayesInference class
+        model_weights = self.cal_model_weight(
+            BME_Dict, justifiability, n_bootstarp=n_bootstarp)
 
-        # Create box plot for model weights
-        self.plot_model_weights(self.model_weights, 'model_weights')
+        # Plot model weights
+        if justifiability:
+            model_names = self.model_names
+            model_names.insert(0, 'Observation')
 
+            # Split the model weights and save in a dict
+            list_ModelWeights = np.split(
+                model_weights, model_weights.shape[1]/self.n_meas, axis=1)
+            model_weights_dict = {key: weights for key, weights in
+                                  zip(model_names, list_ModelWeights)}
 
-    # -------------------------------------------------------------------------    
-    def calc_justifiability_analysis(self, model_dict, opts_dict):
-        """
-        Perform justifiability analysis by calculating the confusion matrix
-        
-        Parameters
-        ----------
-        model_dict : dict
-            A dictionary including the metamodels.
-        opts_dict : dict
-            A dictionary given the `BayesInference` options.
-        
-        Returns
-        -------
-        confusion_matrix: dict
-            The averaged confusion matrix
-        
-        """
-        # Do setup
-        if self.n_meas is None:
-            self.setup(model_dict)
-            
-        # Extend model names
-        model_names = self.model_names
-        if model_names[0]!= 'Observation':
-            model_names.insert(0, 'Observation')
-        
-        # Generate data
-        # TODO: generate the datset only if it does not exist yet
-        self.just_data = self.generate_dataset(
-            model_dict, True, n_bootstrap=self.n_bootstrap)
-
-        # Run inference for each model if this is not available
-        if self.just_bayes_dict is None:
-            self.just_bayes_dict = {}
-            for model in model_dict.keys():
-                print("-"*20)
-                print("Bayesian inference of {}.\n".format(model))
-                BayesOpts = BayesInference(model_dict[model])
-                    
-                # Set BayesInference options
-                for key, value in opts_dict.items():
-                    if key in BayesOpts.__dict__.keys():
-                        if key == "Discrepancy" and isinstance(value, dict):
-                            setattr(BayesOpts, key, value[model])
-                        else:
-                            setattr(BayesOpts, key, value)
-    
-                # Pass justifiability data as perturbed data
-                BayesOpts.bmc = True
-                BayesOpts.emulator= self.emulator
-                BayesOpts.just_analysis = True
-                BayesOpts.perturbed_data = self.just_data
-    
-                self.just_bayes_dict[model] = BayesOpts.create_inference()
-                print("-"*20)
+            #self.plot_just_analysis(model_weights_dict)
+        else:
+            # Create box plot for model weights
+            self.plot_model_weights(model_weights, 'model_weights')
 
-        # Compute model weights
-        self.BME_dict = dict()
-        for modelName, bayesObj in self.bayes_dict.items():
-            self.BME_dict[modelName] = np.exp(bayesObj.log_BME, dtype=self.dtype)
+            # Create kde plot for bayes factors
+            self.plot_bayes_factor(BME_Dict, 'kde_plot')
 
-        # BME correction in BayesInference class
-        just_model_weights = self.cal_model_weight(
-            self.BME_dict, True, n_bootstrap=self.n_bootstrap)
-
-        # Split the model weights and save in a dict
-        list_ModelWeights = np.split(
-            just_model_weights, self.model_weights.shape[1]/self.n_meas, axis=1)
-        self.just_model_weights_dict = {key: weights for key, weights in
-                              zip(model_names, list_ModelWeights)}
-        
-        # Confusion matrix over all measurement points
-        cf_m = pd.DataFrame()
-        cf_m['Generated by'] = model_names
-        for i in range(len(model_names)):
-            # Ignore 'Observation', this not in the model_weights_dict
-            # TODO: how to change the code so that it is included as well?
-            if i==0:
-                continue
-            avg = []
-            for n in model_names:
-                avg.append(np.sum(self.just_model_weights_dict[n][i-1]))
-                
-            # Norm to sum to 1 for each 'Generated by' row
-            cf_m[model_names[i]] = avg/self.n_meas
-        self.confusion_matrix = cf_m
-            
-        # Plot model weights
-        self.plot_just_analysis()
+            # Store model weights in a dict
+            model_weights_dict = {key: weights for key, weights in
+                                  zip(self.model_names, model_weights)}
 
+        return bayes_dict, model_weights_dict
 
     # -------------------------------------------------------------------------
     def generate_dataset(self, model_dict, justifiability=False,
-                         n_bootstrap=1):
+                         n_bootstarp=1):
         """
         Generates the perturbed data set for the Bayes factor calculations and
         the data set for the justifiability analysis.
@@ -331,7 +221,7 @@ class BayesModelComparison:
         bool, optional
             Whether to perform the justifiability analysis. The default is
             `False`.
-        n_bootstrap : int, optional
+        n_bootstarp : int, optional
             Number of bootstrap iterations. The default is `1`.
 
         Returns
@@ -348,28 +238,27 @@ class BayesModelComparison:
         # Perturb observations for Bayes Factor
         if self.perturbed_data is None:
             self.perturbed_data = self.__perturb_data(
-                    Engine.Model.observations, out_names, n_bootstrap,
+                    Engine.Model.observations, out_names, n_bootstarp,
                     noise_level=self.data_noise_level)
 
         # Only for Bayes Factor
         if not justifiability:
-            return self.perturbed_data # TODO: why return this as self... and the other one not? Is this used again?
+            return self.perturbed_data
 
         # Evaluate metamodel
         runs = {}
-        for key, metaModel in model_dict.items(): # TODO: add check for emulator vs model
-            y_hat, _ = metaModel.eval_metamodel(nsamples=n_bootstrap)
+        for key, metaModel in model_dict.items():
+            y_hat, _ = metaModel.eval_metamodel(nsamples=n_bootstarp)
             runs[key] = y_hat
 
         # Generate data
-        for i in range(n_bootstrap):
-            y_data = self.perturbed_data[i].reshape(1, -1)# makes every entry in self.perturbed_data 2D by adding one dim outside
-            justData = np.tril(np.repeat(y_data, y_data.shape[1], axis=0)) # Lower triangle matrix from repeats of y_data
-            # TODO: why triangle matrix here?
+        for i in range(n_bootstarp):
+            y_data = self.perturbed_data[i].reshape(1, -1)
+            justData = np.tril(np.repeat(y_data, y_data.shape[1], axis=0))
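+            # NOTE (editor's note): np.tril of the repeated row keeps only the
+            # first k+1 data points in row k, e.g.
+            # np.tril(np.repeat([[1, 2, 3]], 3, axis=0)) gives
+            # [[1, 0, 0], [1, 2, 0], [1, 2, 3]], i.e. it appears to build data
+            # sets with an increasing number of measurement points.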
             # Use surrogate runs for data-generating process
             for key, metaModel in model_dict.items():
                 model_data = np.array(
-                    [runs[key][out][i] for out in out_names]).reshape(y_data.shape) # reshapes model runs to match y_data
+                    [runs[key][out][i] for out in out_names]).reshape(y_data.shape)
                 justData = np.vstack((
                     justData,
                     np.tril(np.repeat(model_data, model_data.shape[1], axis=0))
@@ -387,7 +276,7 @@ class BayesModelComparison:
     # -------------------------------------------------------------------------
     def __perturb_data(self, data, output_names, n_bootstrap, noise_level):
         """
-        Returns an array with n_bootstrap_itrs rows of perturbed data.
+        Returns an array with n_bootstrap_itrs rows of perturbed data.
         The first row includes the original observation data.
         If `self.bayes_loocv` is True, a 2d-array will be returned with
         repeated rows and zero diagonal entries.
@@ -429,13 +318,13 @@ class BayesModelComparison:
         return final_data
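+
+    # NOTE (editor's illustrative sketch, not part of the original code): a
+    # generic bootstrap perturbation of this kind keeps the original
+    # observations in the first row and adds noise scaled by `noise_level` to
+    # the remaining rows; the exact scaling used above may differ.
+    #
+    # >>> import numpy as np
+    # >>> rng = np.random.default_rng(0)
+    # >>> obs = np.array([1.0, 2.0, 3.0])
+    # >>> n_rows, noise_level = 4, 0.01
+    # >>> data = np.tile(obs, (n_rows, 1))
+    # >>> data[1:] += noise_level * np.abs(obs) * rng.standard_normal(
+    # ...     size=(n_rows - 1, len(obs)))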
 
     # -------------------------------------------------------------------------
-    def cal_model_weight(self, BME_dict, justifiability=False, n_bootstrap=1):
+    def cal_model_weight(self, BME_Dict, justifiability=False, n_bootstarp=1):
         """
         Normalize the BME (assumption: model prior weights are equal for all models)
 
         Parameters
         ----------
-        BME_dict : dict
+        BME_Dict : dict
             A dictionary containing the BME values.
 
         Returns
@@ -445,12 +334,12 @@ class BayesModelComparison:
 
         """
         # Stack the BME values for all models
-        all_BME = np.vstack(list(BME_dict.values()))
+        all_BME = np.vstack(list(BME_Dict.values()))
 
         if justifiability:
+            # Compute expected log_BME for justifiability analysis
             all_BME = all_BME.reshape(
-                all_BME.shape[0], -1, n_bootstrap).mean(axis=2)
+                all_BME.shape[0], -1, n_bootstarp).mean(axis=2)
 
         # Model weights
         model_weights = np.divide(all_BME, np.nansum(all_BME, axis=0))
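+
+        # NOTE (editor's illustrative sketch, not part of the original code):
+        # with equal model priors the weights are the BMEs normalized per
+        # bootstrap column, e.g.
+        # >>> import numpy as np
+        # >>> all_BME = np.array([[2.0, 1.0], [6.0, 3.0]])  # 2 models, 2 runs
+        # >>> np.divide(all_BME, np.nansum(all_BME, axis=0))
+        # array([[0.25, 0.25],
+        #        [0.75, 0.75]])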
@@ -473,16 +362,16 @@ class BayesModelComparison:
         None.
 
         """
-        model_weights_dict = self.just_model_weights_dict
+
+        directory = 'Outputs_Comparison/'
+        os.makedirs(directory, exist_ok=True)
         Color = [*mcolors.TABLEAU_COLORS]
         names = [*model_weights_dict]
 
-        # Plot weights for each 'Generated by'
         model_names = [model.replace('_', '$-$') for model in self.model_names]
         for name in names:
             fig, ax = plt.subplots()
             for i, model in enumerate(model_names[1:]):
-                #print(model, i)
                 plt.plot(list(range(1, self.n_meas+1)),
                          model_weights_dict[name][i],
                          color=Color[i], marker='o',
@@ -495,12 +384,13 @@ class BayesModelComparison:
             ax.set_xticks(list(range(1, self.n_meas+1)))
             plt.legend(loc="best")
             fig.savefig(
-                f'{self.out_dir}modelWeights_{name}.svg', bbox_inches='tight'
+                f'{directory}modelWeights_{name}.svg', bbox_inches='tight'
                 )
             plt.close()
 
-        # Confusion matrix for each measurement point
-        for index in range(0, self.n_meas):
+        # Confusion matrix for some measurement points
+        epsilon = 1 if self.just_n_meas != 1 else 0
+        for index in range(0, self.n_meas+epsilon, self.just_n_meas):
             weights = np.array(
                 [model_weights_dict[key][:, index] for key in model_weights_dict]
                 )
@@ -515,26 +405,10 @@ class BayesModelComparison:
             g.set_xlabel(r"\textbf{Data generated by:}", labelpad=15)
             g.set_ylabel(r"\textbf{Model weight for:}", labelpad=15)
             g.figure.savefig(
-                f"{self.out_dir}confusionMatrix_ND_{index+1}.pdf",
+                f"{directory}confusionMatrix_ND_{index+1}.pdf",
                 bbox_inches='tight'
                 )
             plt.close()
-                
-        # Plot the averaged confusion matrix
-        out_names = names[1:]
-        cf = self.confusion_matrix[out_names].to_numpy()
-        g = sns.heatmap(cf.T, annot=True, cmap='Blues', xticklabels=model_names,
-        yticklabels=model_names[1:], annot_kws={"size": 24})
-        g.xaxis.tick_top()
-        g.xaxis.set_label_position('top')
-        g.set_xlabel(r"\textbf{Data generated by:}", labelpad=15)
-        g.set_ylabel(r"\textbf{Model weight for:}", labelpad=15)
-        g.figure.savefig(
-            f"{self.out_dir}confusionMatrix_full.pdf",
-            bbox_inches='tight'
-            )
-        plt.close()
-        
 
     # -------------------------------------------------------------------------
     def plot_model_weights(self, model_weights, plot_name):
@@ -554,9 +428,13 @@ class BayesModelComparison:
         None.
 
         """
+        font_size = 40
+        # mkdir for plots
+        directory = 'Outputs_Comparison/'
+        os.makedirs(directory, exist_ok=True)
+
         # Create figure
         fig, ax = plt.subplots()
-        font_size = 40
 
         # Filter data using np.isnan
         mask = ~np.isnan(model_weights.T)
@@ -584,35 +462,44 @@ class BayesModelComparison:
         for median in bp['medians']:
             median.set(color='#b2df8a', linewidth=2)
 
-        # Customize the axes
+        # change the style of fliers and their fill
+        # for flier in bp['fliers']:
+        #     flier.set(marker='o', color='#e7298a', alpha=0.75)
+
+        # Custom x-axis labels
         model_names = [model.replace('_', '$-$') for model in self.model_names]
         ax.set_xticklabels(model_names)
+
         ax.set_ylabel('Weight', fontsize=font_size)
+
+        # Title
+        plt.title('Posterior Model Weights')
+
+        # Set y lim
         ax.set_ylim((-0.05, 1.05))
+
+        # Set size of the ticks
         for t in ax.get_xticklabels():
             t.set_fontsize(font_size)
         for t in ax.get_yticklabels():
             t.set_fontsize(font_size)
 
-        # Title
-        plt.title('Posterior Model Weights')
-        
         # Save the figure
         fig.savefig(
-            f'./{self.out_dir}{plot_name}.pdf', bbox_inches='tight'
+            f'./{directory}{plot_name}.pdf', bbox_inches='tight'
             )
 
         plt.close()
 
     # -------------------------------------------------------------------------
-    def plot_bayes_factor(self, BME_dict, plot_name=''):
+    def plot_bayes_factor(self, BME_Dict, plot_name=''):
         """
        Plots the Bayes factor distributions in a :math:`N_m \\times N_m`
         matrix, where :math:`N_m` is the number of the models.
 
         Parameters
         ----------
-        BME_dict : dict
+        BME_Dict : dict
             A dictionary containing the BME values of the models.
         plot_name : str, optional
             Plot name. The default is ''.
@@ -622,11 +509,16 @@ class BayesModelComparison:
         None.
 
         """
-        # Plot setup
+
         font_size = 40
+
+        # mkdir for plots
+        directory = 'Outputs_Comparison/'
+        os.makedirs(directory, exist_ok=True)
+
         Colors = ["blue", "green", "gray", "brown"]
 
-        model_names = list(BME_dict.keys())
+        model_names = list(BME_Dict.keys())
         nModels = len(model_names)
 
         # Plots
@@ -648,7 +540,7 @@ class BayesModelComparison:
 
                     # Null hypothesis: key_j is the better model
                     BayesFactor = np.log10(
-                        np.divide(BME_dict[key_i], BME_dict[key_j])
+                        np.divide(BME_Dict[key_i], BME_Dict[key_j])
                         )
 
                     # sns.kdeplot(BayesFactor, ax=ax, color=Colors[i], shade=True)
@@ -741,8 +633,10 @@ class BayesModelComparison:
                             fontsize=fsize, color=Colors[i],
                             transform=ax.transAxes)
 
-        # Customize axes
+        # Defining custom 'ylim' values.
         custom_ylim = (0, 1.05)
+
+        # Setting the values for all axes.
         plt.setp(axes, ylim=custom_ylim)
 
         # set labels
@@ -754,7 +648,7 @@ class BayesModelComparison:
         plt.subplots_adjust(wspace=0.2, hspace=0.1)
 
         plt.savefig(
-            f'./{self.out_dir}Bayes_Factor{plot_name}.pdf', bbox_inches='tight'
+            f'./{directory}Bayes_Factor{plot_name}.pdf', bbox_inches='tight'
             )
 
         plt.close()
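
For orientation, the Bayes factor plotted above is simply the ratio of two models' BME values, taken element-wise over the bootstrap samples and shown on a log10 scale. A minimal sketch with made-up BME arrays (the model names and numbers below are illustrative only):

import numpy as np

# Hypothetical BME samples for two competing models
BME_Dict = {
    'model_A': np.array([1.0e-3, 2.0e-3, 1.5e-3]),
    'model_B': np.array([4.0e-4, 6.0e-4, 5.0e-4]),
}

# log10 Bayes factor of model_A over model_B, as in plot_bayes_factor
log10_BF = np.log10(np.divide(BME_Dict['model_A'], BME_Dict['model_B']))
print(log10_BF)  # values > 0 favour model_A
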
diff --git a/src/bayesvalidrox/bayes_inference/discrepancy.py b/src/bayesvalidrox/bayes_inference/discrepancy.py
index b3c235ebe..fff32a250 100644
--- a/src/bayesvalidrox/bayes_inference/discrepancy.py
+++ b/src/bayesvalidrox/bayes_inference/discrepancy.py
@@ -36,7 +36,7 @@ class Discrepancy:
     * Option B: With unknown residual covariance matrix \\(\\Sigma\\),
     parametrized as \\(\\Sigma(\\theta_{\\epsilon})=\\sigma^2 \\textbf{I}_
     {N_{out}}\\) with unknown residual variances \\(\\sigma^2\\).
-    This term will be jointly infered with the uncertain input parameters. For
+    This term will be jointly inferred with the uncertain input parameters. For
     the inversion, you need to define a prior marginal via `Input` class. Note
     that \\(\\sigma^2\\) is only a single scalar multiplier for the diagonal
     entries of the covariance matrix \\(\\Sigma\\).
@@ -58,17 +58,10 @@ class Discrepancy:
     """
 
     def __init__(self, InputDisc='', disc_type='Gaussian', parameters=None):
-        # Set the values
         self.InputDisc = InputDisc
         self.disc_type = disc_type
         self.parameters = parameters
-        
-        # Other inits
-        self.ExpDesign = None
-        self.n_samples = None
-        self.sigma2_prior = None
-        self.name = None
-        self.opt_sigma = None # This will be set in the inference class and used in mcmc
+
     # -------------------------------------------------------------------------
     def get_sample(self, n_samples):
         """
@@ -94,11 +87,6 @@ class Discrepancy:
         # Create and store BoundTuples
         self.ExpDesign = ExpDesigns(self.InputDisc)
         self.ExpDesign.sampling_method = 'random'
-        
-        # TODO: why does it call 'generate_ED' instead of 'generate_samples?
-        # ExpDesign.bound_tuples, onp_sigma, prior_space needed from the outside
-        # Discrepancy opt_sigma, InputDisc needed from the outside
-        # TODO: opt_sigma not defined here, but called from the outside??
         self.ExpDesign.generate_ED(
             n_samples, max_pce_deg=1
             )
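
As a reminder of the discrepancy options described in the docstring above: with option A the residual covariance is known and supplied directly, while option B parametrizes it as a single unknown variance times the identity over the outputs. A minimal numpy sketch of the option-B structure (the numbers are placeholders):

import numpy as np

n_out = 5        # number of model outputs
sigma2 = 0.01    # residual variance; known for option A, inferred for option B

# Sigma(theta_eps) = sigma^2 * I_{N_out}: independent, homoscedastic residuals
Sigma = sigma2 * np.eye(n_out)
print(np.diag(Sigma))
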
diff --git a/src/bayesvalidrox/bayes_inference/mcmc.py b/src/bayesvalidrox/bayes_inference/mcmc.py
index f4d1524d3..fe22a152f 100755
--- a/src/bayesvalidrox/bayes_inference/mcmc.py
+++ b/src/bayesvalidrox/bayes_inference/mcmc.py
@@ -15,85 +15,6 @@ import shutil
 os.environ["OMP_NUM_THREADS"] = "1"
 
 
-# -------------------------------------------------------------------------
-def _check_ranges(theta, ranges): # TODO: this is a replica of exp_designs.check_ranges
-    """
-    This function checks if theta lies in the given ranges.
-
-    Parameters
-    ----------
-    theta : array
-        Proposed parameter set.
-    ranges : nested list
-        List of the praremeter ranges.
-
-    Returns
-    -------
-    c : bool
-        If it lies in the given range, it return True else False.
-
-    """
-    c = True
-    # traverse in the list1
-    for i, bounds in enumerate(ranges):
-        x = theta[i]
-        # condition check
-        if x < bounds[0] or x > bounds[1]:
-            c = False
-            return c
-    return c
-
-# -------------------------------------------------------------------------
-def gelman_rubin(chain, return_var=False):
-    """
-    The potential scale reduction factor (PSRF) defined by the variance
-    within one chain, W, with the variance between chains B.
-    Both variances are combined in a weighted sum to obtain an estimate of
-    the variance of a parameter \\( \\theta \\).The square root of the
-    ratio of this estimates variance to the within chain variance is called
-    the potential scale reduction.
-    For a well converged chain it should approach 1. Values greater than
-    1.1 typically indicate that the chains have not yet fully converged.
-
-    Source: http://joergdietrich.github.io/emcee-convergence.html
-
-    https://github.com/jwalton3141/jwalton3141.github.io/blob/master/assets/posts/ESS/rwmh.py
-
-    Parameters
-    ----------
-    chain : array (n_walkers, n_steps, n_params)
-        The emcee ensamples.
-
-    Returns
-    -------
-    R_hat : float
-        The Gelman-Robin values.
-
-    """
-    chain = np.array(chain)
-    m_chains, n_iters = chain.shape[:2]
-
-    # Calculate between-chain variance
-    θb = np.mean(chain, axis=1)
-    θbb = np.mean(θb, axis=0)
-    B_over_n = ((θbb - θb)**2).sum(axis=0)
-    B_over_n /= (m_chains - 1)
-
-    # Calculate within-chain variances
-    ssq = np.var(chain, axis=1, ddof=1)
-    W = np.mean(ssq, axis=0)
-
-    # (over) estimate of variance
-    var_θ = W * (n_iters - 1) / n_iters + B_over_n
-
-    if return_var:
-        return var_θ
-    else:
-        # The square root of the ratio of this estimates variance to the
-        # within chain variance
-        R_hat = np.sqrt(var_θ / W)
-        return R_hat
-
 class MCMC:
     """
     A class for Bayesian inference via a Markov chain Monte Carlo (MCMC)
@@ -120,67 +41,65 @@ class MCMC:
     """
 
     def __init__(self, BayesOpts):
-        # Inputs
+
         self.BayesOpts = BayesOpts
-        
-        # Param inits
-        self.counter = 0
-        self.observation = None
-        self.total_sigma2 = None
-        
-        # Get general params from BayesOpts
-        self.out_dir = self.BayesOpts.out_dir
-        
-        # Get MCMC parameters ftom BayesOpts
-        pars = self.BayesOpts.mcmc_params
-        self.initsamples = pars['init_samples']
-        if isinstance(self.initsamples, pd.DataFrame):
-            self.initsamples = self.initsamples.values
-        self.nsteps = int(pars['n_steps'])
-        self.nwalkers = int(pars['n_walkers'])
-        self.nburn = pars['n_burn']
-        self.moves = pars['moves']
-        self.mp = pars['multiprocessing']
-        self.verbose = pars['verbose']
 
     def run_sampler(self, observation, total_sigma2):
-        """
-        Run the MCMC sampler for the given observations and stdevs.
-
-        Parameters
-        ----------
-        observation : TYPE
-            DESCRIPTION.
-        total_sigma2 : TYPE
-            DESCRIPTION.
 
-        Returns
-        -------
-        Posterior_df : TYPE
-            DESCRIPTION.
-
-        """
-        # Get init values
         BayesObj = self.BayesOpts
+        MetaModel = BayesObj.engine.MetaModel
+        Model = BayesObj.engine.Model
         Discrepancy = self.BayesOpts.Discrepancy
-        n_cpus = BayesObj.engine.Model.n_cpus
-        ndim = BayesObj.engine.MetaModel.n_params
-        if not os.path.exists(self.out_dir):
-            os.makedirs(self.out_dir)
+        n_cpus = Model.n_cpus
+        priorDist = BayesObj.engine.ExpDesign.JDist
+        ndim = MetaModel.n_params
+        self.counter = 0
+        output_dir = f'Outputs_Bayes_{Model.name}_{self.BayesOpts.name}'
+        if not os.path.exists(output_dir):
+            os.makedirs(output_dir)
 
-        # Save inputs
         self.observation = observation
         self.total_sigma2 = total_sigma2
 
+        # Unpack mcmc parameters given to BayesObj.mcmc_params
+        self.initsamples = None
+        self.nwalkers = 100
+        self.nburn = 200
+        self.nsteps = 100000
+        self.moves = None
+        self.mp = False
+        self.verbose = False
+
+        # Extract initial samples
+        if 'init_samples' in BayesObj.mcmc_params:
+            self.initsamples = BayesObj.mcmc_params['init_samples']
+            if isinstance(self.initsamples, pd.DataFrame):
+                self.initsamples = self.initsamples.values
+
+        # Extract number of steps per walker
+        if 'n_steps' in BayesObj.mcmc_params:
+            self.nsteps = int(BayesObj.mcmc_params['n_steps'])
+        # Extract number of walkers (chains)
+        if 'n_walkers' in BayesObj.mcmc_params:
+            self.nwalkers = int(BayesObj.mcmc_params['n_walkers'])
+        # Extract moves
+        if 'moves' in BayesObj.mcmc_params:
+            self.moves = BayesObj.mcmc_params['moves']
+        # Extract multiprocessing
+        if 'multiprocessing' in BayesObj.mcmc_params:
+            self.mp = BayesObj.mcmc_params['multiprocessing']
+        # Extract verbose
+        if 'verbose' in BayesObj.mcmc_params:
+            self.verbose = BayesObj.mcmc_params['verbose']
+
         # Set initial samples
         np.random.seed(0)
         if self.initsamples is None:
             try:
-                initsamples = BayesObj.engine.ExpDesign.JDist.sample(self.nwalkers).T
-                initsamples = np.swapaxes(np.array([initsamples]),0,1) # TODO: test if this still works with multiple input dists
+                initsamples = priorDist.sample(self.nwalkers).T
             except:
                 # when aPCE selected - gaussian kernel distribution
-                inputSamples = self.BayesOpts.engine.ExpDesign.raw_data.T
+                inputSamples = MetaModel.ExpDesign.raw_data.T
                 random_indices = np.random.choice(
                     len(inputSamples), size=self.nwalkers, replace=False
                     )
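
The block above first sets defaults and then overrides them key by key; the keys it looks for in BayesObj.mcmc_params are exactly the ones below. A sketch of such a dictionary (the numeric values are placeholders, not recommendations):

mcmc_params = {
    'init_samples': None,      # array/DataFrame of initial walker positions
    'n_steps': 5000,           # steps per walker
    'n_walkers': 50,           # number of chains
    'moves': None,             # optional emcee move object
    'multiprocessing': False,  # parallel likelihood evaluations
    'verbose': False,          # print convergence diagnostics while sampling
}
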
@@ -206,14 +125,16 @@ class MCMC:
                     initsamples[:, idx_dim] = dist.rvs(size=self.nwalkers)
 
                 # Update lower and upper
-                BayesObj.engine.MetaModel.ExpDesign.bound_tuples = bound_tuples
+                MetaModel.ExpDesign.bound_tuples = bound_tuples
 
         # Check if sigma^2 needs to be inferred
-        if Discrepancy.opt_sigma != 'B': # TODO: why !='B'?
+        if Discrepancy.opt_sigma != 'B':
             sigma2_samples = Discrepancy.get_sample(self.nwalkers)
 
             # Update initsamples
             initsamples = np.hstack((initsamples, sigma2_samples))
+
+            # Update ndim
             ndim = initsamples.shape[1]
 
             # Discrepancy bound
@@ -225,8 +146,10 @@ class MCMC:
         print("\n>>>> Bayesian inference with MCMC for "
               f"{self.BayesOpts.name} started. <<<<<<")
 
-        # Set up the backend and clear it in case the file already exists
-        backend = emcee.backends.HDFBackend(f"{self.out_dir}/emcee_sampler.h5")
+        # Set up the backend
+        filename = f"{output_dir}/emcee_sampler.h5"
+        backend = emcee.backends.HDFBackend(filename)
+        # Clear the backend in case the file already exists
         backend.reset(self.nwalkers, ndim)
 
         # Define emcee sampler
@@ -253,8 +176,8 @@ class MCMC:
                         )
 
                     # Reset sampler
-                    pos = pos.coords
                     sampler.reset()
+                    pos = pos.coords
                 else:
                     pos = initsamples
 
@@ -329,13 +252,13 @@ class MCMC:
                 # output current autocorrelation estimate
                 if self.verbose:
                     print(f"Mean autocorr. time estimate: {np.nanmean(tau):.3f}")
-                    list_gr = np.round(gelman_rubin(sampler.chain), 3)
+                    list_gr = np.round(self.gelman_rubin(sampler.chain), 3)
                     print("Gelman-Rubin Test*: ", list_gr)
 
                 # check convergence
                 converged = np.all(tau*autocorreverynsteps < sampler.iteration)
                 converged &= np.all(np.abs(tauold - tau) / tau < 0.01)
-                converged &= np.all(gelman_rubin(sampler.chain) < 1.1)
+                converged &= np.all(self.gelman_rubin(sampler.chain) < 1.1)
 
                 if converged:
                     break
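
For reference, a self-contained sketch of the R-hat statistic used in the convergence check above, following the same weighted-variance formula as the gelman_rubin routine (the chain array and its shape are synthetic here):

import numpy as np

def r_hat(chain):
    # chain: array of shape (n_walkers, n_steps, n_params)
    m, n = chain.shape[:2]
    chain_means = np.mean(chain, axis=1)                  # per-walker means
    grand_mean = np.mean(chain_means, axis=0)
    B_over_n = ((grand_mean - chain_means)**2).sum(axis=0) / (m - 1)
    W = np.mean(np.var(chain, axis=1, ddof=1), axis=0)    # within-chain variance
    var_hat = W * (n - 1) / n + B_over_n                  # pooled estimate
    return np.sqrt(var_hat / W)                           # ~1 once converged

chain = np.random.normal(size=(4, 1000, 2))               # synthetic, well-mixed
print(r_hat(chain))                                       # close to 1, below 1.1
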
@@ -354,7 +277,7 @@ class MCMC:
         thin = int(0.5*np.nanmin(tau)) if int(0.5*np.nanmin(tau)) != 0 else 1
         finalsamples = sampler.get_chain(discard=burnin, flat=True, thin=thin)
         acc_fr = np.nanmean(sampler.acceptance_fraction)
-        list_gr = np.round(gelman_rubin(sampler.chain[:, burnin:]), 3)
+        list_gr = np.round(self.gelman_rubin(sampler.chain[:, burnin:]), 3)
 
         # Print summary
         print('\n')
@@ -384,7 +307,7 @@ class MCMC:
 
         # Plot traces
         if self.verbose and self.nsteps < 10000:
-            pdf = PdfPages(self.out_dir+'/traceplots.pdf')
+            pdf = PdfPages(output_dir+'/traceplots.pdf')
             fig = plt.figure()
             for parIdx in range(ndim):
                 # Set up the axes with gridspec
@@ -411,6 +334,7 @@ class MCMC:
 
                 # Destroy the current plot
                 plt.clf()
+
             pdf.close()
 
         # plot development of autocorrelation estimate
@@ -424,9 +348,33 @@ class MCMC:
             plt.ylim(0, np.nanmax(taus)+0.1*(np.nanmax(taus)-np.nanmin(taus)))
             plt.xlabel("number of steps")
             plt.ylabel(r"mean $\hat{\tau}$")
-            fig1.savefig(f"{self.out_dir}/autocorrelation_time.pdf",
+            fig1.savefig(f"{output_dir}/autocorrelation_time.pdf",
                          bbox_inches='tight')
 
+        # logml_dict = self.marginal_llk_emcee(sampler, self.nburn, logp=None,
+        # maxiter=5000)
+        # print('\nThe Bridge Sampling Estimation is "
+        #       f"{logml_dict['logml']:.5f}.')
+
+        # # Posterior-based expectation of posterior probablity
+        # postExpPostLikelihoods = np.mean(sampler.get_log_prob(flat=True)
+        # [self.nburn*self.nwalkers:])
+
+        # # Posterior-based expectation of prior densities
+        # postExpPrior = np.mean(self.log_prior(emcee_trace.T))
+
+        # # Posterior-based expectation of likelihoods
+        # postExpLikelihoods_emcee = postExpPostLikelihoods - postExpPrior
+
+        # # Calculate Kullback-Leibler Divergence
+        # KLD_emcee = postExpLikelihoods_emcee - logml_dict['logml']
+        # print("Kullback-Leibler divergence: %.5f"%KLD_emcee)
+
+        # # Information Entropy based on Entropy paper Eq. 38
+        # infEntropy_emcee = logml_dict['logml'] - postExpPrior -
+        #                    postExpLikelihoods_emcee
+        # print("Information Entropy: %.5f" %infEntropy_emcee)
+
         Posterior_df = pd.DataFrame(finalsamples, columns=par_names)
 
         return Posterior_df
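
The burn-in and thinning used above are derived from the integrated autocorrelation time: roughly two autocorrelation times are discarded and the chain is thinned by about half an autocorrelation time. A small sketch with hypothetical tau values and parameter names:

import numpy as np
import pandas as pd

tau = np.array([35.2, 41.7])                    # hypothetical autocorr. times
burnin = int(2 * np.nanmax(tau))                # 83: discard ~2*tau steps
thin = max(int(0.5 * np.nanmin(tau)), 1)        # 17: keep every ~tau/2-th step

# The flattened, thinned chain is then wrapped into a DataFrame
finalsamples = np.random.normal(size=(200, 2))  # stand-in for sampler.get_chain()
Posterior_df = pd.DataFrame(finalsamples, columns=['theta_1', 'theta_2'])
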
@@ -449,7 +397,8 @@ class MCMC:
             returned otherwise an array.
 
         """
-        MetaModel = self.BayesOpts.engine.MetaModel
+
+        MetaModel = self.BayesOpts.MetaModel
         Discrepancy = self.BayesOpts.Discrepancy
 
         # Find the number of sigma2 parameters
@@ -468,7 +417,7 @@ class MCMC:
 
         for i in range(nsamples):
             # Check if the sample is within the parameters' range
-            if _check_ranges(theta[i], params_range):
+            if self._check_ranges(theta[i], params_range):
                 # Check if all dists are uniform, if yes priors are equal.
                 if all(MetaModel.input_obj.Marginals[i].dist_type == 'uniform'
                        for i in range(MetaModel.n_params)):
@@ -480,7 +429,7 @@ class MCMC:
 
                 # Check if bias term needs to be inferred
                 if Discrepancy.opt_sigma != 'B':
-                    if _check_ranges(theta[i, -n_sigma2:],
+                    if self._check_ranges(theta[i, -n_sigma2:],
                                           disc_bound_tuples):
                         if all('unif' in disc_marginals[i].dist_type for i in
                                range(Discrepancy.ExpDesign.ndim)):
@@ -514,20 +463,21 @@ class MCMC:
         """
 
         BayesOpts = self.BayesOpts
-        MetaModel = BayesOpts.engine.MetaModel
+        MetaModel = BayesOpts.MetaModel
         Discrepancy = self.BayesOpts.Discrepancy
 
         # Find the number of sigma2 parameters
         if Discrepancy.opt_sigma != 'B':
             disc_bound_tuples = Discrepancy.ExpDesign.bound_tuples
             n_sigma2 = len(disc_bound_tuples)
-            # Check if bias term should be inferred
+        else:
+            n_sigma2 = -len(theta)
+        # Check if bias term needs to be inferred
+        if Discrepancy.opt_sigma != 'B':
             sigma2 = theta[:, -n_sigma2:]
             theta = theta[:, :-n_sigma2]
         else:
-            n_sigma2 = -len(theta)
             sigma2 = None
-        
         theta = theta if theta.ndim != 1 else theta.reshape((1, -1))
 
         # Evaluate Model/MetaModel at theta
@@ -611,11 +561,12 @@ class MCMC:
         """
 
         BayesObj = self.BayesOpts
+        MetaModel = BayesObj.MetaModel
         Model = BayesObj.engine.Model
 
         if BayesObj.emulator:
             # Evaluate the MetaModel
-            mean_pred, std_pred = BayesObj.engine.MetaModel.eval_metamodel(samples=theta)
+            mean_pred, std_pred = MetaModel.eval_metamodel(samples=theta)
         else:
             # Evaluate the origModel
             mean_pred, std_pred = dict(), dict()
@@ -659,7 +610,8 @@ class MCMC:
             A error model.
 
         """
-        MetaModel = self.BayesOpts.engine.MetaModel
+        BayesObj = self.BayesOpts
+        MetaModel = BayesObj.MetaModel
 
         # Prepare the posterior samples
         try:
@@ -684,6 +636,274 @@ class MCMC:
 
         # Train a GPR meta-model using MAP
         error_MetaModel = MetaModel.create_model_error(
-            self.BayesOpts.BiasInputs, y_map, name='Calib')
+            BayesObj.BiasInputs, y_map, name='Calib')
 
         return error_MetaModel
+
+    # -------------------------------------------------------------------------
+    def gelman_rubin(self, chain, return_var=False):
+        """
+        The potential scale reduction factor (PSRF) combines the variance
+        within one chain, W, with the variance between chains, B.
+        Both variances are combined in a weighted sum to obtain an estimate of
+        the variance of a parameter \\( \\theta \\). The square root of the
+        ratio of this estimate's variance to the within-chain variance is
+        called the potential scale reduction.
+        For a well-converged chain it should approach 1. Values greater than
+        1.1 typically indicate that the chains have not yet fully converged.
+
+        Source: http://joergdietrich.github.io/emcee-convergence.html
+
+        https://github.com/jwalton3141/jwalton3141.github.io/blob/master/assets/posts/ESS/rwmh.py
+
+        Parameters
+        ----------
+        chain : array (n_walkers, n_steps, n_params)
+            The emcee ensemble samples.
+
+        Returns
+        -------
+        R_hat : float
+            The Gelman-Rubin values.
+
+        """
+        m_chains, n_iters = chain.shape[:2]
+
+        # Calculate between-chain variance
+        θb = np.mean(chain, axis=1)
+        θbb = np.mean(θb, axis=0)
+        B_over_n = ((θbb - θb)**2).sum(axis=0)
+        B_over_n /= (m_chains - 1)
+
+        # Calculate within-chain variances
+        ssq = np.var(chain, axis=1, ddof=1)
+        W = np.mean(ssq, axis=0)
+
+        # (over) estimate of variance
+        var_θ = W * (n_iters - 1) / n_iters + B_over_n
+
+        if return_var:
+            return var_θ
+        else:
+            # The square root of the ratio of this estimate's variance to
+            # the within-chain variance
+            R_hat = np.sqrt(var_θ / W)
+            return R_hat
+
+    # -------------------------------------------------------------------------
+    def marginal_llk_emcee(self, sampler, nburn=None, logp=None, maxiter=1000):
+        """
+        The Bridge Sampling Estimator of the Marginal Likelihood based on
+        https://gist.github.com/junpenglao/4d2669d69ddfe1d788318264cdcf0583
+
+        Parameters
+        ----------
+        sampler : emcee.EnsembleSampler
+            The sampler object holding the result of the MCMC run.
+        nburn : int, optional
+            Number of burn-in steps. The default is None.
+        logp : callable, optional
+            Model log-probability function. The default is None, in which
+            case the sampler's own log-probability function is used.
+        maxiter : int, optional
+            Maximum number of iterations. The default is 1000.
+
+        Returns
+        -------
+        marg_llk : dict
+            Estimated Marginal log-Likelihood.
+
+        """
+        r0, tol1, tol2 = 0.5, 1e-10, 1e-4
+
+        if logp is None:
+            logp = sampler.log_prob_fn
+
+        # Split the samples into two parts
+        # Use the first 50% for fitting the proposal distribution
+        # and the second 50% in the iterative scheme.
+        if nburn is None:
+            mtrace = sampler.chain
+        else:
+            mtrace = sampler.chain[:, nburn:, :]
+
+        nchain, len_trace, nrofVars = mtrace.shape
+
+        N1_ = len_trace // 2
+        N1 = N1_*nchain
+        N2 = len_trace*nchain - N1
+
+        samples_4_fit = np.zeros((nrofVars, N1))
+        samples_4_iter = np.zeros((nrofVars, N2))
+        effective_n = np.zeros((nrofVars))
+
+        # matrix with already transformed samples
+        for var in range(nrofVars):
+
+            # for fitting the proposal
+            x = mtrace[:, :N1_, var]
+
+            samples_4_fit[var, :] = x.flatten()
+            # for the iterative scheme
+            x2 = mtrace[:, N1_:, var]
+            samples_4_iter[var, :] = x2.flatten()
+
+            # effective sample size of samples_4_iter, scalar
+            effective_n[var] = self._my_ESS(x2)
+
+        # median effective sample size (scalar)
+        neff = np.median(effective_n)
+
+        # get mean & covariance matrix and generate samples from proposal
+        m = np.mean(samples_4_fit, axis=1)
+        V = np.cov(samples_4_fit)
+        L = chol(V, lower=True)
+
+        # Draw N2 samples from the proposal distribution
+        gen_samples = m[:, None] + np.dot(
+            L, st.norm.rvs(0, 1, size=samples_4_iter.shape)
+            )
+
+        # Evaluate proposal distribution for posterior & generated samples
+        q12 = st.multivariate_normal.logpdf(samples_4_iter.T, m, V)
+        q22 = st.multivariate_normal.logpdf(gen_samples.T, m, V)
+
+        # Evaluate unnormalized posterior for posterior & generated samples
+        q11 = logp(samples_4_iter.T)
+        q21 = logp(gen_samples.T)
+
+        # Run iterative scheme:
+        tmp = self._iterative_scheme(
+            N1, N2, q11, q12, q21, q22, r0, neff, tol1, maxiter, 'r'
+            )
+        if ~np.isfinite(tmp['logml']):
+            warnings.warn(
+                "Logml could not be estimated within maxiter, rerunning with "
+                "adjusted starting value. Estimate might be more variable than"
+                " usual.")
+            # use geometric mean as starting value
+            r0_2 = np.sqrt(tmp['r_vals'][-2]*tmp['r_vals'][-1])
+            tmp = self._iterative_scheme(
+                N1, N2, q11, q12, q21, q22, r0_2, neff, tol2, maxiter, 'logml'
+                )
+
+        marg_llk = dict(
+            logml=tmp['logml'], niter=tmp['niter'], method="normal",
+            q11=q11, q12=q12, q21=q21, q22=q22
+            )
+        return marg_llk
+
+    # -------------------------------------------------------------------------
+    def _iterative_scheme(self, N1, N2, q11, q12, q21, q22, r0, neff, tol,
+                          maxiter, criterion):
+        """
+        Iterative scheme as proposed in Meng and Wong (1996) to estimate the
+        marginal likelihood
+
+        """
+        l1 = q11 - q12
+        l2 = q21 - q22
+        # To increase numerical stability,
+        # subtracting the median of l1 from l1 & l2 later
+        lstar = np.median(l1)
+        s1 = neff/(neff + N2)
+        s2 = N2/(neff + N2)
+        r = r0
+        r_vals = [r]
+        logml = np.log(r) + lstar
+        criterion_val = 1 + tol
+
+        i = 0
+        while (i <= maxiter) & (criterion_val > tol):
+            rold = r
+            logmlold = logml
+            numi = np.exp(l2 - lstar)/(s1 * np.exp(l2 - lstar) + s2 * r)
+            deni = 1/(s1 * np.exp(l1 - lstar) + s2 * r)
+            if np.sum(~np.isfinite(numi))+np.sum(~np.isfinite(deni)) > 0:
+                warnings.warn(
+                    """Infinite value in iterative scheme, returning NaN.
+                     Try rerunning with more samples.""")
+            r = (N1/N2) * np.sum(numi)/np.sum(deni)
+            r_vals.append(r)
+            logml = np.log(r) + lstar
+            i += 1
+            if criterion == 'r':
+                criterion_val = np.abs((r - rold)/r)
+            elif criterion == 'logml':
+                criterion_val = np.abs((logml - logmlold)/logml)
+
+        if i >= maxiter:
+            return dict(logml=np.NaN, niter=i, r_vals=np.asarray(r_vals))
+        else:
+            return dict(logml=logml, niter=i)
+
+    # -------------------------------------------------------------------------
+    def _my_ESS(self, x):
+        """
+        Compute the effective sample size of the estimand of interest.
+        Vectorised implementation.
+        https://github.com/jwalton3141/jwalton3141.github.io/blob/master/assets/posts/ESS/rwmh.py
+
+
+        Parameters
+        ----------
+        x : array of shape (n_walkers, n_steps)
+            MCMC Samples.
+
+        Returns
+        -------
+        int
+            Effective sample size.
+
+        """
+        m_chains, n_iters = x.shape
+
+        def variogram(t):
+            variogram = ((x[:, t:] - x[:, :(n_iters - t)])**2).sum()
+            variogram /= (m_chains * (n_iters - t))
+            return variogram
+
+        post_var = self.gelman_rubin(x, return_var=True)
+
+        t = 1
+        rho = np.ones(n_iters)
+        negative_autocorr = False
+
+        # Iterate until the sum of consecutive estimates of autocorrelation is
+        # negative
+        while not negative_autocorr and (t < n_iters):
+            rho[t] = 1 - variogram(t) / (2 * post_var)
+
+            if not t % 2:
+                negative_autocorr = sum(rho[t-1:t+1]) < 0
+
+            t += 1
+
+        return int(m_chains*n_iters / (1 + 2*rho[1:t].sum()))
+
+    # -------------------------------------------------------------------------
+    def _check_ranges(self, theta, ranges):
+        """
+        This function checks if theta lies in the given ranges.
+
+        Parameters
+        ----------
+        theta : array
+            Proposed parameter set.
+        ranges : nested list
+            List of the parameter ranges.
+
+        Returns
+        -------
+        c : bool
+            True if theta lies within the given ranges, otherwise False.
+
+        """
+        c = True
+        # traverse in the list1
+        for i, bounds in enumerate(ranges):
+            x = theta[i]
+            # condition check
+            if x < bounds[0] or x > bounds[1]:
+                c = False
+                return c
+        return c
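
The range check used by the prior and likelihood evaluations above is a plain per-dimension bounds test; an equivalent standalone sketch (not the package method itself):

ranges = [(0.0, 1.0), (-5.0, 5.0)]   # one (lower, upper) pair per parameter

def check_ranges(theta, ranges):
    # True only if every component of theta lies inside its bounds
    return all(lo <= x <= hi for x, (lo, hi) in zip(theta, ranges))

print(check_ranges([0.3, 4.9], ranges))   # True
print(check_ranges([1.2, 0.0], ranges))   # False
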
diff --git a/src/bayesvalidrox/pylink/pylink.py b/src/bayesvalidrox/pylink/pylink.py
index 637f42317..227a51ab3 100644
--- a/src/bayesvalidrox/pylink/pylink.py
+++ b/src/bayesvalidrox/pylink/pylink.py
@@ -231,7 +231,7 @@ class PyLinkForwardModel(object):
                 self.observations_valid = self.observations_valid
             else:
                 raise Exception("Please provide the observation data as a "
-                                "dictionary via observations_valid attribute or pass"
+                                "dictionary via observations attribute or pass"
                                 " the csv-file path to MeasurementFile "
                                 "attribute")
             # Compute the number of observation
diff --git a/src/bayesvalidrox/surrogate_models/__init__.py b/src/bayesvalidrox/surrogate_models/__init__.py
index 6d8ce9f1c..70bfb20f5 100644
--- a/src/bayesvalidrox/surrogate_models/__init__.py
+++ b/src/bayesvalidrox/surrogate_models/__init__.py
@@ -1,12 +1,7 @@
 # -*- coding: utf-8 -*-
-from .engine import Engine
-from .exp_designs import ExpDesigns
-from .input_space import InputSpace
+
 from .surrogate_models import MetaModel
 
 __all__ = [
-    "MetaModel",
-    "InputSpace",
-    "ExpDesigns",
-    "Engine"
+    "MetaModel"
     ]
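
With this change only MetaModel is re-exported from the subpackage, so the remaining classes have to be imported from their modules directly, for example (module paths as shown elsewhere in this patch):

from bayesvalidrox.surrogate_models import MetaModel
from bayesvalidrox.surrogate_models.engine import Engine
from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
from bayesvalidrox.surrogate_models.input_space import InputSpace
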
diff --git a/src/bayesvalidrox/surrogate_models/desktop.ini b/src/bayesvalidrox/surrogate_models/desktop.ini
deleted file mode 100644
index 632de13ae..000000000
--- a/src/bayesvalidrox/surrogate_models/desktop.ini
+++ /dev/null
@@ -1,2 +0,0 @@
-[LocalizedFileNames]
-exploration.py=@exploration.py,0
diff --git a/src/bayesvalidrox/surrogate_models/engine.py b/src/bayesvalidrox/surrogate_models/engine.py
index 1c8fa56e6..42307d477 100644
--- a/src/bayesvalidrox/surrogate_models/engine.py
+++ b/src/bayesvalidrox/surrogate_models/engine.py
@@ -3,27 +3,35 @@
 Engine to train the surrogate
 
 """
+import copy
 from copy import deepcopy, copy
+import h5py
 import joblib
-from joblib import Parallel, delayed
-import matplotlib.pyplot as plt
-import multiprocessing
 import numpy as np
 import os
-import pandas as pd
-import pathlib
-import scipy.optimize as opt
+
 from scipy import stats, signal, linalg, sparse
 from scipy.spatial import distance
+from tqdm import tqdm
+import scipy.optimize as opt
 from sklearn.metrics import mean_squared_error
-import seaborn as sns
+import multiprocessing
+import matplotlib.pyplot as plt
+import pandas as pd
 import sys
-from tqdm import tqdm
+import seaborn as sns
+from joblib import Parallel, delayed
+
 
 from bayesvalidrox.bayes_inference.bayes_inference import BayesInference
 from bayesvalidrox.bayes_inference.discrepancy import Discrepancy
 from .exploration import Exploration
+import pathlib
 
+#from .inputs import Input
+#from .exp_designs import ExpDesigns
+#from .surrogate_models import MetaModel
+#from bayesvalidrox.post_processing.post_processing import PostProcessing
 
 def hellinger_distance(P, Q):
     """
@@ -47,17 +55,17 @@ def hellinger_distance(P, Q):
 
     """
     P = np.array(P)
-    Q = np.array(Q)
-
+    Q= np.array(Q)
+    
     mu1 = P.mean()
     Sigma1 = np.std(P)
 
     mu2 = Q.mean()
     Sigma2 = np.std(Q)
 
-    term1 = np.sqrt(2 * Sigma1 * Sigma2 / (Sigma1 ** 2 + Sigma2 ** 2))
+    term1 = np.sqrt(2*Sigma1*Sigma2 / (Sigma1**2 + Sigma2**2))
 
-    term2 = np.exp(-.25 * (mu1 - mu2) ** 2 / (Sigma1 ** 2 + Sigma2 ** 2))
+    term2 = np.exp(-.25 * (mu1 - mu2)**2 / (Sigma1**2 + Sigma2**2))
 
     H_squared = 1 - term1 * term2
 
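
The closed form in hellinger_distance above is the Hellinger distance between two univariate normals fitted to P and Q, H^2 = 1 - sqrt(2*s1*s2/(s1^2+s2^2)) * exp(-(mu1-mu2)^2 / (4*(s1^2+s2^2))). A small numeric check with synthetic samples:

import numpy as np

rng = np.random.default_rng(0)
P = rng.normal(0.0, 1.0, size=10_000)      # samples from N(0, 1)
Q = rng.normal(0.5, 1.0, size=10_000)      # samples from N(0.5, 1)

mu1, s1 = P.mean(), np.std(P)
mu2, s2 = Q.mean(), np.std(Q)
term1 = np.sqrt(2 * s1 * s2 / (s1**2 + s2**2))
term2 = np.exp(-0.25 * (mu1 - mu2)**2 / (s1**2 + s2**2))
H = np.sqrt(1 - term1 * term2)
print(H)                                    # roughly 0.18 for these two
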
@@ -92,10 +100,9 @@ def logpdf(x, mean, cov):
 
     return log_lik
 
-
 def subdomain(Bounds, n_new_samples):
     """
-    Divides a domain defined by Bounds into subdomains.
+    Divides a domain defined by Bounds into sub domains.
 
     Parameters
     ----------
@@ -103,6 +110,8 @@ def subdomain(Bounds, n_new_samples):
         List of lower and upper bounds.
     n_new_samples : int
         Number of samples to divide the domain for.
+    n_params : int
+        The number of params to build the subdomains for
 
     Returns
     -------
@@ -118,41 +127,23 @@ def subdomain(Bounds, n_new_samples):
         LinSpace[i] = np.linspace(start=Bounds[i][0], stop=Bounds[i][1],
                                   num=n_subdomains)
     Subdomains = []
-    for k in range(n_subdomains - 1):
+    for k in range(n_subdomains-1):
         mylist = []
         for i in range(n_params):
-            mylist.append((LinSpace[i, k + 0], LinSpace[i, k + 1]))
+            mylist.append((LinSpace[i, k+0], LinSpace[i, k+1]))
         Subdomains.append(tuple(mylist))
 
     return Subdomains
 
-
-class Engine:
-
+class Engine():
+    
+    
     def __init__(self, MetaMod, Model, ExpDes):
         self.MetaModel = MetaMod
         self.Model = Model
         self.ExpDesign = ExpDes
         self.parallel = False
-        self.trained = False
-
-        # Init other parameters
-        self.bound_tuples = None
-        self.errorModel = None
-        self.LCerror = None
-        self.n_obs = None
-        self.observations = None
-        self.out_names = None
-        self.seqMinDist = None
-        self.seqRMSEStd = None
-        self.SeqKLD = None
-        self.SeqDistHellinger = None
-        self.SeqBME = None
-        self.seqValidError = None
-        self.SeqModifiedLOO = None
-        self.valid_likelihoods = None
-        self._y_hat_prev = None
-
+        
     def start_engine(self) -> None:
         """
         Do all the preparations that need to be run before the actual training
@@ -164,8 +155,9 @@ class Engine:
         """
         self.out_names = self.Model.Output.names
         self.MetaModel.out_names = self.out_names
-
-    def train_normal(self, parallel=False, verbose=False, save=False) -> None:
+        
+        
+    def train_normal(self, parallel = False, verbose = False, save = False) -> None:
         """
         Trains surrogate on static samples only.
         Samples are taken from the experimental design and the specified 
@@ -178,12 +170,10 @@ class Engine:
         None
 
         """
-        if self.out_names == 'None':
-            self.start_engine()
-
+            
         ExpDesign = self.ExpDesign
         MetaModel = self.MetaModel
-
+        
         # Read ExpDesign (training and targets) from the provided hdf5
         if ExpDesign.hdf5_file is not None:
             # TODO: need to run 'generate_ED' as well after this or not?
@@ -192,52 +182,51 @@ class Engine:
             # Check if an old hdf5 file exists: if yes, rename it
             hdf5file = f'ExpDesign_{self.Model.name}.hdf5'
             if os.path.exists(hdf5file):
-                #     os.rename(hdf5file, 'old_'+hdf5file)
+           #     os.rename(hdf5file, 'old_'+hdf5file)
                 file = pathlib.Path(hdf5file)
                 file.unlink()
 
         # Prepare X samples 
         # For training the surrogate use ExpDesign.X_tr, ExpDesign.X is for the model to run on 
         ExpDesign.generate_ED(ExpDesign.n_init_samples,
-                              transform=True,
-                              max_pce_deg=np.max(MetaModel.pce_deg))
-
+                                              transform=True,
+                                              max_pce_deg=np.max(MetaModel.pce_deg))
+        
         # Run simulations at X 
         if not hasattr(ExpDesign, 'Y') or ExpDesign.Y is None:
             print('\n Now the forward model needs to be run!\n')
-            ED_Y, up_ED_X = self.Model.run_model_parallel(ExpDesign.X, mp=parallel)
+            ED_Y, up_ED_X = self.Model.run_model_parallel(ExpDesign.X, mp = parallel)
             ExpDesign.Y = ED_Y
         else:
             # Check if a dict has been passed.
             if not type(ExpDesign.Y) is dict:
                raise Exception('Please provide either a dictionary or an hdf5 '
                                 'file to ExpDesign.hdf5_file argument.')
-
+                
         # Separate output dict and x-values
         if 'x_values' in ExpDesign.Y:
             ExpDesign.x_values = ExpDesign.Y['x_values']
             del ExpDesign.Y['x_values']
         else:
             print('No x_values are given, this might lead to issues during PostProcessing')
-
+        
+        
         # Fit the surrogate
         MetaModel.fit(ExpDesign.X, ExpDesign.Y, parallel, verbose)
-
+        
         # Save what there is to save
         if save:
             # Save surrogate
             with open(f'surrogates/surrogate_{self.Model.name}.pk1', 'wb') as output:
                 joblib.dump(MetaModel, output, 2)
-
+                    
             # Zip the model run directories
-            if self.Model.link_type.lower() == 'pylink' and \
-                    self.ExpDesign.sampling_method.lower() != 'user':
+            if self.Model.link_type.lower() == 'pylink' and\
+               self.ExpDesign.sampling_method.lower() != 'user':
                 self.Model.zip_subdirs(self.Model.name, f'{self.Model.name}_')
-
-        # Set that training was done
-        self.trained = True
-
-    def train_sequential(self, parallel=False, verbose=False) -> None:
+                
+            
+    def train_sequential(self, parallel = False, verbose = False) -> None:
         """
         Train the surrogate in a sequential manner.
        First build and train everything on the static samples, then iterate
@@ -248,21 +237,22 @@ class Engine:
         None
 
         """
-        # self.train_normal(parallel, verbose)
+        #self.train_normal(parallel, verbose)
         self.parallel = parallel
         self.train_seq_design(parallel, verbose)
-
+        
+        
     # -------------------------------------------------------------------------
     def eval_metamodel(self, samples=None, nsamples=None,
                        sampling_method='random', return_samples=False):
         """
-        Evaluates metamodel at the requested samples. One can also generate
+        Evaluates meta-model at the requested samples. One can also generate
         nsamples.
 
         Parameters
         ----------
         samples : array of shape (n_samples, n_params), optional
-            Samples to evaluate metamodel at. The default is None.
+            Samples to evaluate meta-model at. The default is None.
         nsamples : int, optional
             Number of samples to generate, if no `samples` is provided. The
             default is None.
@@ -285,7 +275,7 @@ class Engine:
             samples = self.ExpDesign.generate_samples(
                 nsamples,
                 sampling_method
-            )
+                )
 
         # Transformation to other space is to be done in the MetaModel
         # TODO: sort the transformations better
@@ -295,9 +285,10 @@ class Engine:
             return mean_pred, std_pred, samples
         else:
             return mean_pred, std_pred
-
+        
+        
     # -------------------------------------------------------------------------
-    def train_seq_design(self, parallel=False, verbose=False):
+    def train_seq_design(self, parallel = False, verbose = False):
         """
         Starts the adaptive sequential design for refining the surrogate model
         by selecting training points in a sequential manner.
@@ -309,7 +300,7 @@ class Engine:
 
         """
         self.parallel = parallel
-
+        
         # Initialization
         self.SeqModifiedLOO = {}
         self.seqValidError = {}
@@ -319,13 +310,14 @@ class Engine:
         self.seqRMSEMean = {}
         self.seqRMSEStd = {}
         self.seqMinDist = []
-
-        if not hasattr(self.MetaModel, 'valid_samples') or self.MetaModel.valid_samples is None:
+        
+        if not hasattr(self.MetaModel, 'valid_samples'):
             self.ExpDesign.valid_samples = []
             self.ExpDesign.valid_model_runs = []
             self.valid_likelihoods = []
+        
+        validError = None
 
-        # validError = None
 
         # Determine the metamodel type
         if self.MetaModel.meta_model_type.lower() != 'gpe':
@@ -344,15 +336,14 @@ class Engine:
         n_replication = self.ExpDesign.n_replication
         util_func = self.ExpDesign.util_func
         output_name = self.out_names
-
+        
         # Handle if only one UtilityFunctions is provided
         if not isinstance(util_func, list):
             util_func = [self.ExpDesign.util_func]
 
         # Read observations or MCReference
         # TODO: recheck the logic in this if statement
-        if (len(self.Model.observations) != 0 or self.Model.meas_file is not None) and hasattr(self.MetaModel,
-                                                                                               'Discrepancy'):
+        if (len(self.Model.observations) != 0 or self.Model.meas_file is not None) and hasattr(self.MetaModel, 'Discrepancy'):
             self.observations = self.Model.read_observation()
             obs_data = self.observations
         else:
@@ -360,16 +351,15 @@ class Engine:
             # TODO: TotalSigma2 not defined if not in this else???
             # TODO: no self.observations if in here
             TotalSigma2 = {}
-
+            
         # ---------- Initial self.MetaModel ----------
-        if not self.trained:
-            self.train_normal(parallel=parallel, verbose=verbose)
-
+        self.train_normal(parallel = parallel, verbose=verbose)
+        
         initMetaModel = deepcopy(self.MetaModel)
 
         # Validation error if validation set is provided.
         if self.ExpDesign.valid_model_runs:
-            init_rmse, init_valid_error = self._validError()  # initMetaModel)
+            init_rmse, init_valid_error = self._validError(initMetaModel)
             init_valid_error = list(init_valid_error.values())
         else:
             init_rmse = None
@@ -389,7 +379,7 @@ class Engine:
             if post_snapshot:
                 parNames = self.ExpDesign.par_names
                 print('Posterior snapshot (initial) is being plotted...')
-                self._posteriorPlot(init_post, parNames, 'SeqPosterior_init')
+                self.__posteriorPlot(init_post, parNames, 'SeqPosterior_init')
 
         # Check the convergence of the Mean & Std
         if mc_ref and pce:
@@ -400,14 +390,13 @@ class Engine:
         # Read the initial experimental design
         Xinit = self.ExpDesign.X
         init_n_samples = len(self.ExpDesign.X)
-        initYprev = self.ExpDesign.Y  # initMetaModel.ModelOutputDict
-        # self.MetaModel.ModelOutputDict = self.ExpDesign.Y
+        initYprev = self.ExpDesign.Y#initMetaModel.ModelOutputDict
+        #self.MetaModel.ModelOutputDict = self.ExpDesign.Y
         initLCerror = initMetaModel.LCerror
         n_itrs = max_n_samples - init_n_samples
 
-        # Get some initial statistics
+        ## Get some initial statistics
         # Read the initial ModifiedLOO
-        init_mod_LOO = []
         if pce:
             Scores_all, varExpDesignY = [], []
             for out_name in output_name:
@@ -423,14 +412,14 @@ class Engine:
 
             Scores = [item for sublist in Scores_all for item in sublist]
             weights = [item for sublist in varExpDesignY for item in sublist]
-            init_mod_LOO = [np.average([1 - score for score in Scores],
+            init_mod_LOO = [np.average([1-score for score in Scores],
                                        weights=weights)]
 
         prevMetaModel_dict = {}
-        # prevExpDesign_dict = {}
+        #prevExpDesign_dict = {}
         # Can run sequential design multiple times for comparison
         for repIdx in range(n_replication):
-            print(f'\n>>>> Replication: {repIdx + 1}<<<<')
+            print(f'\n>>>> Replication: {repIdx+1}<<<<')
 
             # util_func: the function to use inside the type of exploitation
             for util_f in util_func:
@@ -446,6 +435,7 @@ class Engine:
                 Yprev = initYprev
 
                 Xfull = []
+                Yfull = []
 
                 # Store the initial ModifiedLOO
                 if pce:
@@ -467,23 +457,23 @@ class Engine:
 
                 # ------- Start Sequential Experimental Design -------
                 postcnt = 1
-                for itr_no in range(1, n_itrs + 1):
+                for itr_no in range(1, n_itrs+1):
                     print(f'\n>>>> Iteration number {itr_no} <<<<')
 
                     # Save the metamodel prediction before updating
                     prevMetaModel_dict[itr_no] = deepcopy(self.MetaModel)
-                    # prevExpDesign_dict[itr_no] = deepcopy(self.ExpDesign)
+                    #prevExpDesign_dict[itr_no] = deepcopy(self.ExpDesign)
                     if itr_no > 1:
-                        pc_model = prevMetaModel_dict[itr_no - 1]
+                        pc_model = prevMetaModel_dict[itr_no-1]
                         self._y_hat_prev, _ = pc_model.eval_metamodel(
                             samples=Xfull[-1].reshape(1, -1))
-                        del prevMetaModel_dict[itr_no - 1]
+                        del prevMetaModel_dict[itr_no-1]
 
                     # Optimal Bayesian Design
-                    # self.MetaModel.ExpDesignFlag = 'sequential'
+                    #self.MetaModel.ExpDesignFlag = 'sequential'
                     Xnew, updatedPrior = self.choose_next_sample(TotalSigma2,
-                                                                 n_canddidate,
-                                                                 util_f)
+                                                            n_canddidate,
+                                                            util_f)
                     S = np.min(distance.cdist(Xinit, Xnew, 'euclidean'))
                     self.seqMinDist.append(S)
                     print(f"\nmin Dist from OldExpDesign: {S:2f}")
@@ -492,19 +482,20 @@ class Engine:
                     # Evaluate the full model response at the new sample
                     Ynew, _ = self.Model.run_model_parallel(
                         Xnew, prevRun_No=total_n_samples
-                    )
+                        )
                     total_n_samples += Xnew.shape[0]
 
                     # ------ Plot the surrogate model vs Original Model ------
-                    if self.ExpDesign.adapt_verbose:
+                    if hasattr(self.ExpDesign, 'adapt_verbose') and \
+                       self.ExpDesign.adapt_verbose:
                         from .adaptPlot import adaptPlot
                         y_hat, std_hat = self.MetaModel.eval_metamodel(
                             samples=Xnew
-                        )
+                            )
                         adaptPlot(
                             self.MetaModel, Ynew, y_hat, std_hat,
                             plotED=False
-                        )
+                            )
 
                     # -------- Retrain the surrogate model -------
                     # Extend new experimental design
@@ -518,11 +509,11 @@ class Engine:
                     # Pass new design to the metamodel object
                     self.ExpDesign.sampling_method = 'user'
                     self.ExpDesign.X = Xfull
-                    # self.ExpDesign.Y = self.MetaModel.ModelOutputDict
+                    #self.ExpDesign.Y = self.MetaModel.ModelOutputDict
 
                     # Save the Experimental Design for next iteration
                     Xprev = Xfull
-                    Yprev = self.ExpDesign.Y
+                    Yprev = self.ExpDesign.Y 
 
                     # Pass the new prior as the input
                     # TODO: another look at this - no difference apc to pce to gpe?
@@ -559,7 +550,7 @@ class Engine:
                         weights = [item for sublist in varExpDesignY for item
                                    in sublist]
                         ModifiedLOO = [np.average(
-                            [1 - score for score in Scores], weights=weights)]
+                            [1-score for score in Scores], weights=weights)]
 
                         print('\n')
                         print(f"Updated ModifiedLOO {util_f}:\n", ModifiedLOO)
@@ -567,7 +558,7 @@ class Engine:
 
                     # Compute the validation error
                     if self.ExpDesign.valid_model_runs:
-                        rmse, validError = self._validError()  # self.MetaModel)
+                        rmse, validError = self._validError(self.MetaModel)
                         ValidError = list(validError.values())
                     else:
                         rmse = None
@@ -595,8 +586,8 @@ class Engine:
                         if post_snapshot and postcnt % step_snapshot == 0:
                             parNames = self.ExpDesign.par_names
                             print('Posterior snapshot is being plotted...')
-                            self._posteriorPlot(Posterior, parNames,
-                                                f'SeqPosterior_{postcnt}')
+                            self.__posteriorPlot(Posterior, parNames,
+                                                 f'SeqPosterior_{postcnt}')
                         postcnt += 1
 
                     # Check the convergence of the Mean&Std
@@ -626,11 +617,11 @@ class Engine:
                     if len(obs_data) != 0:
                         del out
                     print()
-                    print('-' * 50)
+                    print('-'*50)
                     print()
 
                # Store updated ModifiedLOO and BME in dictionary
-                strKey = f'{util_f}_rep_{repIdx + 1}'
+                strKey = f'{util_f}_rep_{repIdx+1}'
                 if pce:
                     self.SeqModifiedLOO[strKey] = SeqModifiedLOO
                 if len(self.ExpDesign.valid_model_runs) != 0:
@@ -641,7 +632,7 @@ class Engine:
                     self.SeqBME[strKey] = SeqBME
                     self.SeqKLD[strKey] = SeqKLD
                 if hasattr(self.MetaModel, 'valid_likelihoods') and \
-                        self.valid_likelihoods:
+                   self.valid_likelihoods:
                     self.SeqDistHellinger[strKey] = SeqDistHellinger
                 if mc_ref and pce:
                     self.seqRMSEMean[strKey] = seqRMSEMean
@@ -664,7 +655,7 @@ class Engine:
             Candidate samples.
         index : int
             Model output index.
-        util_func : string, optional
+        UtilMethod : string, optional
             Exploitation utility function. The default is 'Entropy'.
 
         Returns
@@ -682,11 +673,10 @@ class Engine:
         X_can = X_can.reshape(1, -1)
         Y_PC_can, std_PC_can = MetaModel.eval_metamodel(samples=X_can)
 
-        score = None
         if util_func.lower() == 'alm':
             # ----- Entropy/MMSE/active learning MacKay(ALM)  -----
            # Compute prediction variance of the old model
-            canPredVar = {key: std_PC_can[key] ** 2 for key in out_names}
+            canPredVar = {key: std_PC_can[key]**2 for key in out_names}
 
             varPCE = np.zeros((len(out_names), X_can.shape[0]))
             for KeyIdx, key in enumerate(out_names):
@@ -701,7 +691,7 @@ class Engine:
 
            # Compute prediction error and variance of the old model
             predError = {key: Y_PC_can[key] for key in out_names}
-            canPredVar = {key: std_PC_can[key] ** 2 for key in out_names}
+            canPredVar = {key: std_PC_can[key]**2 for key in out_names}
 
            # Compute prediction error and variance of the old model
             # Eq (5) from Liu et al.(2018)
@@ -709,10 +699,10 @@ class Engine:
             for KeyIdx, key in enumerate(out_names):
                 residual = predError[key] - out_dict_y[key][int(index)]
                 var = canPredVar[key]
-                EIGF_PCE[KeyIdx] = np.max(residual ** 2 + var, axis=1)
+                EIGF_PCE[KeyIdx] = np.max(residual**2 + var, axis=1)
             score = np.max(EIGF_PCE, axis=0)
 
-        return -1 * score  # -1 is for minimization instead of maximization
+        return -1 * score   # -1 is for minimization instead of maximization
 
     # -------------------------------------------------------------------------
     def util_BayesianActiveDesign(self, y_hat, std, sigma2Dict, var='DKL'):
@@ -726,8 +716,8 @@ class Engine:
 
         Parameters
         ----------
-        y_hat : unknown
-        std : unknown
+        X_can : array of shape (n_samples, n_params)
+            Candidate samples.
         sigma2Dict : dict
             A dictionary containing the measurement errors (sigma^2).
         var : string, optional
@@ -752,12 +742,14 @@ class Engine:
        # Sample from a normal distribution with Y_mean_can as the mean
        # and Y_std_can as the standard deviation.
         Y_MC, std_MC = {}, {}
-        logPriorLikelihoods = np.zeros(mc_size)
+        logPriorLikelihoods = np.zeros((mc_size))
+       # print(y_hat)
+       # print(list[y_hat])
         for key in list(y_hat):
-            cov = np.diag(std[key] ** 2)
-            print(key, y_hat[key], std[key])
+            cov = np.diag(std[key]**2)
+           # print(y_hat[key], cov)
             # TODO: added the allow_singular = True here
-            rv = stats.multivariate_normal(mean=y_hat[key], cov=cov, allow_singular=True)
+            rv = stats.multivariate_normal(mean=y_hat[key], cov=cov,)
             Y_MC[key] = rv.rvs(size=mc_size)
             logPriorLikelihoods += rv.logpdf(Y_MC[key])
             std_MC[key] = np.zeros((mc_size, y_hat[key].shape[0]))
@@ -765,16 +757,16 @@ class Engine:
         #  Likelihood computation (Comparison of data and simulation
         #  results via PCE with candidate design)
         likelihoods = self._normpdf(Y_MC, std_MC, obs_data, sigma2Dict)
-
+        
         # Rejection Step
         # Random numbers between 0 and 1
         unif = np.random.rand(1, mc_size)[0]
 
         # Reject the poorly performed prior
-        accepted = (likelihoods / np.max(likelihoods)) >= unif
+        accepted = (likelihoods/np.max(likelihoods)) >= unif
 
         # Prior-based estimation of BME
-        logBME = np.log(np.nanmean(likelihoods), dtype=np.longdouble)  # float128)
+        logBME = np.log(np.nanmean(likelihoods), dtype=np.longdouble)#float128)
 
         # Posterior-based expectation of likelihoods
         postLikelihoods = likelihoods[accepted]
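
The rejection step above keeps a prior sample with probability proportional to its normalized likelihood, and the prior-based BME estimate is simply the log of the mean likelihood. A compact sketch with stand-in likelihood values:

import numpy as np

rng = np.random.default_rng(1)
likelihoods = rng.gamma(shape=2.0, scale=1.0, size=1000)   # dummy likelihoods

# Accept sample i if L_i / max(L) >= u_i with u_i ~ U(0, 1)
unif = rng.random(likelihoods.size)
accepted = (likelihoods / np.max(likelihoods)) >= unif

# Prior-based estimate of the (log) Bayesian model evidence
logBME = np.log(np.nanmean(likelihoods))
print(accepted.sum(), logBME)
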
@@ -786,7 +778,6 @@ class Engine:
         # Utility function Eq.2 in Ref. (2)
         # Posterior covariance matrix after observing data y
         # Kullback-Leibler Divergence (Sergey's paper)
-        U_J_d = None
         if var == 'DKL':
 
             # TODO: Calculate the correction factor for BME
@@ -822,7 +813,7 @@ class Engine:
             AIC = -2 * maxlogL + 2 * nModelParams
             # 2 * nModelParams * (nModelParams+1) / (n_obs-nModelParams-1)
             penTerm = 0
-            U_J_d = 1 * (AIC + penTerm)
+            U_J_d = 1*(AIC + penTerm)
 
         # Deviance information criterion
         elif var == 'DIC':
@@ -830,7 +821,7 @@ class Engine:
             N_star_p = 0.5 * np.var(np.log(likelihoods[likelihoods != 0]))
             Likelihoods_theta_mean = self._normpdf(
                 y_hat, std, obs_data, sigma2Dict
-            )
+                )
             DIC = -2 * np.log(Likelihoods_theta_mean) + 2 * N_star_p
 
             U_J_d = DIC
@@ -847,7 +838,7 @@ class Engine:
         del Y_MC
         del std_MC
 
-        return -1 * U_J_d  # -1 is for minimization instead of maximization
+        return -1 * U_J_d   # -1 is for minimization instead of maximization
 
     # -------------------------------------------------------------------------
     def util_BayesianDesign(self, X_can, X_MC, sigma2Dict, var='DKL'):
@@ -858,7 +849,6 @@ class Engine:
         ----------
         X_can : array of shape (n_samples, n_params)
             Candidate samples.
-        X_MC : unknown
         sigma2Dict : dict
             A dictionary containing the measurement errors (sigma^2).
         var : string, optional
@@ -897,11 +887,11 @@ class Engine:
         for key in oldExpDesignY.keys():
             NewExpDesignY[key] = np.vstack(
                 (oldExpDesignY[key], Y_PC_can[key])
-            )
+                )
 
         engine_can.ExpDesign.sampling_method = 'user'
         engine_can.ExpDesign.X = NewExpDesignX
-        # engine_can.ModelOutputDict = NewExpDesignY
+        #engine_can.ModelOutputDict = NewExpDesignY
         engine_can.ExpDesign.Y = NewExpDesignY
 
         # Train the model for the observed data using x_can
@@ -909,7 +899,7 @@ class Engine:
         engine_can.start_engine()
         engine_can.train_normal(parallel=False)
         engine_can.MetaModel.fit(NewExpDesignX, NewExpDesignY)
-        #        engine_can.train_norm_design(parallel=False)
+#        engine_can.train_norm_design(parallel=False)
 
         # Set the ExpDesign to its original values
         engine_can.ExpDesign.X = oldExpDesignX
@@ -917,7 +907,7 @@ class Engine:
         engine_can.ExpDesign.Y = oldExpDesignY
 
         if var.lower() == 'mi':
-            # Mutual information based on Krause et al.
+            # Mutual information based on Krause et al
             # Adapted from Beck & Guillas (MICE) paper
             _, std_PC_can = engine_can.MetaModel.eval_metamodel(samples=X_can)
             std_can = {key: std_PC_can[key] for key in out_names}
@@ -926,7 +916,7 @@ class Engine:
 
             varPCE = np.zeros((len(out_names)))
             for i, key in enumerate(out_names):
-                varPCE[i] = np.mean(std_old[key] ** 2 / std_can[key] ** 2)
+                varPCE[i] = np.mean(std_old[key]**2/std_can[key]**2)
             score = np.mean(varPCE)
 
             return -1 * score
@@ -942,9 +932,9 @@ class Engine:
             # Compute the score
             score = []
             for i, key in enumerate(out_names):
-                pce_var = Y_MC_std_can[key] ** 2
-                pce_var_can = Y_MC_std[key] ** 2
-                score.append(np.mean(pce_var - pce_var_can, axis=0))
+                pce_var = Y_MC_std_can[key]**2
+                pce_var_can = Y_MC_std[key]**2
+                score.append(np.mean(pce_var-pce_var_can, axis=0))
             score = np.mean(score)
 
             return -1 * score
@@ -954,14 +944,13 @@ class Engine:
         MCsize = X_MC.shape[0]
         ESS = 0
 
-        likelihoods = None
-        while (ESS > MCsize) or (ESS < 1):
+        while ((ESS > MCsize) or (ESS < 1)):
 
             # Enriching Monte Carlo samples if need be
             if ESS != 0:
                 X_MC = self.ExpDesign.generate_samples(
                     MCsize, 'random'
-                )
+                    )
 
             # Evaluate the MetaModel at the given samples
             Y_MC, std_MC = PCE_Model_can.eval_metamodel(samples=X_MC)
@@ -970,13 +959,13 @@ class Engine:
             # results via PCE with candidate design)
             likelihoods = self._normpdf(
                 Y_MC, std_MC, self.observations, sigma2Dict
-            )
+                )
 
             # Check the Effective Sample Size (1<ESS<MCsize)
-            ESS = 1 / np.sum(np.square(likelihoods / np.sum(likelihoods)))
+            ESS = 1 / np.sum(np.square(likelihoods/np.sum(likelihoods)))
 
             # Enlarge sample size if it doesn't fulfill the criteria
-            if (ESS > MCsize) or (ESS < 1):
+            if ((ESS > MCsize) or (ESS < 1)):
                 print("--- increasing MC size---")
                 MCsize *= 10
                 ESS = 0
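The stopping criterion in this loop is the effective sample size of the importance weights; a short NumPy sketch with an invented likelihood vector:

import numpy as np

likelihoods = np.array([1e-5, 2e-3, 4e-4, 7e-6, 9e-4])   # invented values
w = likelihoods / np.sum(likelihoods)                     # normalised importance weights
ESS = 1.0 / np.sum(w ** 2)                                # effective sample size, 1 <= ESS <= MCsize
# if ESS falls outside (1, MCsize) the Monte Carlo sample is enlarged by a factor of 10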
@@ -986,20 +975,19 @@ class Engine:
         unif = np.random.rand(1, MCsize)[0]
 
         # Reject poorly performing prior samples
-        accepted = (likelihoods / np.max(likelihoods)) >= unif
+        accepted = (likelihoods/np.max(likelihoods)) >= unif
 
         # -------------------- Utility functions --------------------
         # Utility function Eq.2 in Ref. (2)
         # Kullback-Leibler Divergence (Sergey's paper)
-        U_J_d = None
         if var == 'DKL':
 
             # Prior-based estimation of BME
-            logBME = np.log(np.nanmean(likelihoods, dtype=np.longdouble))  # float128))
+            logBME = np.log(np.nanmean(likelihoods, dtype=np.longdouble))#float128))
 
             # Posterior-based expectation of likelihoods
-            # postLikelihoods = likelihoods[accepted]
-            # postExpLikelihoods = np.mean(np.log(postLikelihoods))
+            postLikelihoods = likelihoods[accepted]
+            postExpLikelihoods = np.mean(np.log(postLikelihoods))
 
             # Haun et al implementation
             U_J_d = np.mean(np.log(likelihoods[likelihoods != 0]) - logBME)
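A compact sketch of the rejection step and the prior-based DKL estimate used above, with synthetic likelihoods and a fixed seed:

import numpy as np

rng = np.random.default_rng(0)
likelihoods = rng.random(1000) ** 4                 # hypothetical, heavily skewed likelihoods
unif = rng.random(likelihoods.shape[0])

# Accept a prior sample when its scaled likelihood exceeds the uniform draw
accepted = (likelihoods / np.max(likelihoods)) >= unif

# Prior-based estimate of the (log) Bayesian model evidence and of D_KL
logBME = np.log(np.nanmean(likelihoods))
U_J_d = np.mean(np.log(likelihoods[likelihoods != 0]) - logBME)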
@@ -1034,8 +1022,6 @@ class Engine:
             postExpLikelihoods = np.mean(np.log(postLikelihoods))
 
             # Posterior-based expectation of prior densities
-            logPriorLikelihoods = []
-            logPriorLikelihoods[accepted] = None  # TODO: this is not defined here, just a fix
             postExpPrior = np.mean(logPriorLikelihoods[accepted])
 
             infEntropy = logBME - postExpPrior - postExpLikelihoods
@@ -1062,7 +1048,8 @@ class Engine:
         del Y_MC
         del std_MC
 
-        return -1 * U_J_d  # -1 is for minimization instead of maximization
+        return -1 * U_J_d   # -1 is for minimization instead of maximization
+
 
     # -------------------------------------------------------------------------
     def run_util_func(self, method, candidates, index, sigma2Dict=None,
@@ -1105,7 +1092,7 @@ class Engine:
 
         elif method.lower() == 'bayesactdesign':
             NCandidate = candidates.shape[0]
-            U_J_d = np.zeros(NCandidate)
+            U_J_d = np.zeros((NCandidate))
             # Evaluate all candidates
             y_can, std_can = self.MetaModel.eval_metamodel(samples=candidates)
             # loop through candidates
@@ -1113,20 +1100,20 @@ class Engine:
                                    desc="BAL Design"):
                 y_hat = {key: items[idx] for key, items in y_can.items()}
                 std = {key: items[idx] for key, items in std_can.items()}
-
-                # print(y_hat)
-                # print(std)
+                
+               # print(y_hat)
+               # print(std)
                 U_J_d[idx] = self.util_BayesianActiveDesign(
                     y_hat, std, sigma2Dict, var)
 
         elif method.lower() == 'bayesoptdesign':
             NCandidate = candidates.shape[0]
-            U_J_d = np.zeros(NCandidate)
+            U_J_d = np.zeros((NCandidate))
             for idx, X_can in tqdm(enumerate(candidates), ascii=True,
                                    desc="OptBayesianDesign"):
                 U_J_d[idx] = self.util_BayesianDesign(X_can, X_MC, sigma2Dict,
                                                       var)
-        return index, -1 * U_J_d
+        return (index, -1 * U_J_d)
 
     # -------------------------------------------------------------------------
     def dual_annealing(self, method, Bounds, sigma2Dict, var, Run_No,
@@ -1143,7 +1130,6 @@ class Engine:
             List of lower and upper boundaries of parameters.
         sigma2Dict : dict
             A dictionary containing the measurement errors (sigma^2).
-        var : unknown
         Run_No : int
             Run number.
         verbose : bool, optional
@@ -1161,14 +1147,13 @@ class Engine:
         Model = self.Model
         max_func_itr = self.ExpDesign.max_func_itr
 
-        Res_Global = None
-        if method.lower() == 'varoptdesign':
+        if method == 'VarOptDesign':
             Res_Global = opt.dual_annealing(self.util_VarBasedDesign,
                                             bounds=Bounds,
                                             args=(Model, var),
                                             maxfun=max_func_itr)
 
-        elif method.lower() == 'bayesoptdesign':
+        elif method == 'BayesOptDesign':
             Res_Global = opt.dual_annealing(self.util_BayesianDesign,
                                             bounds=Bounds,
                                             args=(Model, sigma2Dict, var),
@@ -1178,7 +1163,7 @@ class Engine:
             print(f"Global minimum: xmin = {Res_Global.x}, "
                   f"f(xmin) = {Res_Global.fun:.6f}, nfev = {Res_Global.nfev}")
 
-        return Run_No, Res_Global.x
+        return (Run_No, Res_Global.x)
 
     # -------------------------------------------------------------------------
     def tradeoff_weights(self, tradeoff_scheme, old_EDX, old_EDY):
@@ -1189,7 +1174,7 @@ class Engine:
         `None`: No exploration.
         `equal`: Same weights for exploration and exploitation scores.
         `epsilon-decreasing`: Start with more exploration and increase the
-            influence of exploitation along the way with an exponential decay
+            influence of exploitation along the way with a exponential decay
             function
         `adaptive`: An adaptive method based on:
             Liu, Haitao, Jianfei Cai, and Yew-Soon Ong. "An adaptive sampling
@@ -1213,8 +1198,6 @@ class Engine:
             Exploitation weight.
 
         """
-        exploration_weight = None
-
         if tradeoff_scheme is None:
             exploration_weight = 0
 
@@ -1224,22 +1207,22 @@ class Engine:
         elif tradeoff_scheme == 'epsilon-decreasing':
             # epsilon-decreasing scheme
             # Start with more exploration and increase the influence of
-            # exploitation along the way with an exponential decay function
+            # exploitation along the way with a exponential decay function
             initNSamples = self.ExpDesign.n_init_samples
             n_max_samples = self.ExpDesign.n_max_samples
 
             itrNumber = (self.ExpDesign.X.shape[0] - initNSamples)
             itrNumber //= self.ExpDesign.n_new_samples
 
-            tau2 = -(n_max_samples - initNSamples - 1) / np.log(1e-8)
-            exploration_weight = signal.exponential(n_max_samples - initNSamples,
+            tau2 = -(n_max_samples-initNSamples-1) / np.log(1e-8)
+            exploration_weight = signal.exponential(n_max_samples-initNSamples,
                                                     0, tau2, False)[itrNumber]
 
         elif tradeoff_scheme == 'adaptive':
 
             # Extract itrNumber
             initNSamples = self.ExpDesign.n_init_samples
-            # n_max_samples = self.ExpDesign.n_max_samples
+            n_max_samples = self.ExpDesign.n_max_samples
             itrNumber = (self.ExpDesign.X.shape[0] - initNSamples)
             itrNumber //= self.ExpDesign.n_new_samples
 
@@ -1258,7 +1241,7 @@ class Engine:
                 pce_y_prev = np.array(list(self._y_hat_prev.values()))[:, 0]
                 mseCVError = mean_squared_error(pce_y_prev, y)
 
-                exploration_weight = min([0.5 * mseError / mseCVError, 1])
+                exploration_weight = min([0.5*mseError/mseCVError, 1])
 
         # Exploitation weight
         exploitation_weight = 1 - exploration_weight
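The epsilon-decreasing branch above draws its exploration weight from an exponential decay window; a NumPy-only sketch of the same decay, with invented design sizes:

import numpy as np

n_init_samples, n_max_samples = 10, 30          # hypothetical design sizes
n_steps = n_max_samples - n_init_samples
tau2 = -(n_steps - 1) / np.log(1e-8)

# Equivalent to signal.exponential(n_steps, 0, tau2, False): w(n) = exp(-n / tau2)
weights = np.exp(-np.arange(n_steps) / tau2)
# weights[0] == 1 (pure exploration at the start), weights[-1] == 1e-8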
@@ -1309,11 +1292,8 @@ class Engine:
         # -----------------------------------------
         # Utility function exploit_method provided by user
         if exploit_method.lower() == 'user':
-            # TODO: is the exploit_method meant here?
-            if not hasattr(self.ExpDesign, 'ExploitFunction') or self.ExpDesign.ExploitFunction is None:
-                raise AttributeError(
-                    'Function `ExploitFunction` not given to the ExpDesign, thus cannor run user-defined sequential'
-                    'scheme')
+            if not hasattr(self.ExpDesign, 'ExploitFunction'):
+                raise AttributeError('Function `ExploitFunction` not given to the ExpDesign, thus cannot run user-defined sequential scheme')
             # TODO: syntax does not fully match the rest - can test this??
             Xnew, filteredSamples = self.ExpDesign.ExploitFunction(self)
 
@@ -1322,6 +1302,7 @@ class Engine:
 
             return Xnew, filteredSamples
 
+
         # Dual-Annealing works differently from the rest, so deal with this first
         # Here exploration and exploitation are performed simultaneously
         if explore_method == 'dual annealing':
@@ -1349,7 +1330,7 @@ class Engine:
                 results = []
                 for i in range(n_new_samples):
                     results.append(self.dual_annealing(exploit_method, subdomains[i], sigma2, var, i))
-
+                    
             # New sample
             Xnew = np.array([results[i][1] for i in range(n_new_samples)])
             print("\nXnew:\n", Xnew)
@@ -1357,30 +1338,29 @@ class Engine:
             # Computational cost
             elapsed_time = time.time() - start_time
             print("\n")
-            print(f"Elapsed_time: {round(elapsed_time, 2)} sec.")
-            print('-' * 20)
-
+            print(f"Elapsed_time: {round(elapsed_time,2)} sec.")
+            print('-'*20)
+            
             return Xnew, None
-
+        
         # Generate needed Exploration class
         explore = Exploration(self.ExpDesign, n_candidates)
         explore.w = 100  # * ndim #500  # TODO: where does this value come from?
-
+        
         # Select criterion (mc-intersite-proj-th, mc-intersite-proj)
         explore.mc_criterion = 'mc-intersite-proj'
-
+        
         # Generate the candidate samples
         # TODO: here use the sampling method provided by the expdesign?
-        # sampling_method = self.ExpDesign.sampling_method
-
+        sampling_method = self.ExpDesign.sampling_method
+        
         # TODO: changed this from 'random' for LOOCV
-        # TODO: these are commented out as they are not used !?
-        # if explore_method == 'LOOCV':
-        # allCandidates = self.ExpDesign.generate_samples(n_candidates,
-        #                                                     sampling_method)
-        # else:
-        #     allCandidates, scoreExploration = explore.get_exploration_samples()
-
+        if explore_method == 'LOOCV':
+            allCandidates = self.ExpDesign.generate_samples(n_candidates,
+                                                            sampling_method)
+        else:
+            allCandidates, scoreExploration = explore.get_exploration_samples()
+        
         # -----------------------------------------
         # ---------- EXPLORATION METHODS ----------
         # -----------------------------------------
@@ -1392,7 +1372,7 @@ class Engine:
 
             # Generate random samples
             allCandidates = self.ExpDesign.generate_samples(n_candidates,
-                                                            'random')
+                                                                'random')
 
             # Construct error model based on LCerror
             errorModel = self.MetaModel.create_ModelError(old_EDX, self.LCerror)
@@ -1420,20 +1400,6 @@ class Engine:
             if ndim == 2:
                 def plotter(points, allCandidates, Method,
                             scoreExploration=None):
-                    """
-                    unknown
-
-                    Parameters
-                    ----------
-                    points
-                    allCandidates
-                    Method
-                    scoreExploration
-
-                    Returns
-                    -------
-
-                    """
                     if Method == 'Voronoi':
                         from scipy.spatial import Voronoi, voronoi_plot_2d
                         vor = Voronoi(points)
@@ -1447,7 +1413,7 @@ class Engine:
                     ax1.scatter(allCandidates[:, 0], allCandidates[:, 1], s=10,
                                 c='b', marker="o", label='Design candidates')
                     for i in range(points.shape[0]):
-                        txt = 'p' + str(i + 1)
+                        txt = 'p'+str(i+1)
                         ax1.annotate(txt, (points[i, 0], points[i, 1]))
                     if scoreExploration is not None:
                         for i in range(allCandidates.shape[0]):
@@ -1463,14 +1429,14 @@ class Engine:
         # -----------------------------------------
         # --------- EXPLOITATION METHODS ----------
         # -----------------------------------------
-        if exploit_method.lower() == 'bayesoptdesign' or \
-                exploit_method.lower() == 'bayesactdesign':
+        if exploit_method == 'BayesOptDesign' or\
+           exploit_method == 'BayesActDesign':
 
             # ------- Calculate Exploration weight -------
             # Compute exploration weight based on trade off scheme
             explore_w, exploit_w = self.tradeoff_weights(tradeoff_scheme,
-                                                         old_EDX,
-                                                         old_EDY)
+                                                        old_EDX,
+                                                        old_EDY)
             print(f"\n Exploration weight={explore_w:0.3f} "
                   f"Exploitation weight={exploit_w:0.3f}\n")
 
@@ -1489,19 +1455,19 @@ class Engine:
                 # Split the candidates in groups for multiprocessing
                 split_cand = np.array_split(
                     candidates, n_cand_groups, axis=0
-                )
-                # print(candidates)
-                # print(split_cand)
+                    )
+               # print(candidates)
+               # print(split_cand)
                 if self.parallel:
                     results = Parallel(n_jobs=-1, backend='multiprocessing')(
                         delayed(self.run_util_func)(
                             exploit_method, split_cand[i], i, sigma2, var, X_MC)
-                        for i in range(n_cand_groups))
+                        for i in range(n_cand_groups)) 
                 else:
                     results = []
                     for i in range(n_cand_groups):
                         results.append(self.run_util_func(exploit_method, split_cand[i], i, sigma2, var, X_MC))
-
+                        
                 # Retrieve the results and append them
                 U_J_d = np.concatenate([results[NofE][1] for NofE in
                                         range(n_cand_groups)])
@@ -1523,28 +1489,29 @@ class Engine:
             # ------- Calculate Total score -------
             # ------- Trade off between EXPLORATION & EXPLOITATION -------
             # Accumulate the samples
-            finalCandidates = np.concatenate((allCandidates, candidates), axis=0)
-            finalCandidates = np.unique(finalCandidates, axis=0)
-
+            finalCandidates = np.concatenate((allCandidates, candidates), axis = 0)   
+            finalCandidates = np.unique(finalCandidates, axis = 0)
+            
             # The scores take both exploration and exploitation samples
             # into account, without duplicates
             totalScore = np.zeros(finalCandidates.shape[0])
-            # self.totalScore = totalScore
-
+            #self.totalScore = totalScore
+            
             for cand_idx in range(finalCandidates.shape[0]):
                 # find candidate indices
                 idx1 = np.where(allCandidates == finalCandidates[cand_idx])[0]
                 idx2 = np.where(candidates == finalCandidates[cand_idx])[0]
-
+                
                 # exploration 
-                if idx1.shape[0] > 0:
+                if idx1 != []:
                     idx1 = idx1[0]
                     totalScore[cand_idx] += explore_w * scoreExploration[idx1]
-
+                    
                 # exploitation
-                if idx2.shape[0] > 0:
+                if idx2 != []:
                     idx2 = idx2[0]
                     totalScore[cand_idx] += exploit_w * norm_U_J_d[idx2]
+                
 
             # Total score
             totalScore = exploit_w * norm_U_J_d
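The loop above accumulates a weighted sum of the two normalised score vectors; a minimal sketch with invented scores and weights:

import numpy as np

explore_w, exploit_w = 0.3, 0.7                    # from tradeoff_weights
score_exploration = np.array([0.1, 0.5, 0.4])      # hypothetical exploration scores
norm_U_J_d = np.array([0.2, 0.2, 0.6])             # normalised exploitation scores

total_score = exploit_w * norm_U_J_d + explore_w * score_exploration
best = np.argsort(total_score)[::-1][:1]           # index of the best candidate(s)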
@@ -1580,23 +1547,23 @@ class Engine:
                 Xnew = finalCandidates[sorted_idxtotalScore[:n_new_samples]]
 
 
-        elif exploit_method.lower() == 'varoptdesign':
+        elif exploit_method == 'VarOptDesign':
             # ------- EXPLOITATION: VarOptDesign -------
             UtilMethod = var
 
             # ------- Calculate Exploration weight -------
             # Compute exploration weight based on trade off scheme
             explore_w, exploit_w = self.tradeoff_weights(tradeoff_scheme,
-                                                         old_EDX,
-                                                         old_EDY)
+                                                        old_EDX,
+                                                        old_EDY)
             print(f"\nweightExploration={explore_w:0.3f} "
                   f"weightExploitation={exploit_w:0.3f}")
 
             # Generate candidate samples from Exploration class
             nMeasurement = old_EDY[OutputNames[0]].shape[1]
-
-            # print(UtilMethod)
-
+            
+           # print(UtilMethod)
+            
             # Find sensitive region
             if UtilMethod == 'LOOCV':
                 LCerror = self.MetaModel.LCerror
@@ -1608,12 +1575,12 @@ class Engine:
                             LCerror[y_key][key])
 
                 ExploitScore = np.max(np.max(allModifiedLOO, axis=1), axis=1)
-            # print(allModifiedLOO.shape)
+               # print(allModifiedLOO.shape)
 
             elif UtilMethod in ['EIGF', 'ALM']:
                 # ----- All other in  ['EIGF', 'ALM'] -----
                 # Initialize the ExploitScore array
-                # ExploitScore = np.zeros((len(old_EDX), len(OutputNames)))
+                ExploitScore = np.zeros((len(old_EDX), len(OutputNames)))
 
                 # Split the candidates in groups for multiprocessing
                 if explore_method != 'Voronoi':
@@ -1663,9 +1630,9 @@ class Engine:
             # Normalize U_J_d
             ExploitScore = ExploitScore / np.sum(ExploitScore)
             totalScore = exploit_w * ExploitScore
-            # print(totalScore.shape)
-            # print(explore_w)
-            # print(scoreExploration.shape)
+           # print(totalScore.shape)
+           # print(explore_w)
+           # print(scoreExploration.shape)
             totalScore += explore_w * scoreExploration
 
             temp = totalScore.copy()
@@ -1687,7 +1654,7 @@ class Engine:
                     # select the requested number of samples
                     Xnew[i] = newSamples[np.argmax(maxminScore)]
 
-        elif exploit_method.lower() == 'alphabetic':
+        elif exploit_method == 'alphabetic':
             # ------- EXPLOITATION: ALPHABETIC -------
             Xnew = self.util_AlphOptDesign(allCandidates, var)
 
@@ -1709,7 +1676,7 @@ class Engine:
             raise NameError('The requested design method is not available.')
 
         print("\n")
-        print("\nRun No. {}:".format(old_EDX.shape[0] + 1))
+        print("\nRun No. {}:".format(old_EDX.shape[0]+1))
         print("Xnew:\n", Xnew)
 
         # TODO: why does it also return None?
@@ -1727,7 +1694,7 @@ class Engine:
 
         Arguments
         ---------
-        candidates : int?
+        NCandidate : int
             Number of candidate points to be searched
 
         var : string
@@ -1738,7 +1705,7 @@ class Engine:
         X_new : array of shape (1, n_params)
             The new sampling location in the input space.
         """
-        MetaModelOrig = self  # TODO: this doesn't fully seem correct?
+        MetaModelOrig = self # TODO: this doesn't fully seem correct?
         n_new_samples = MetaModelOrig.ExpDesign.n_new_samples
         NCandidate = candidates.shape[0]
 
@@ -1746,7 +1713,7 @@ class Engine:
         OutputName = self.out_names[0]
 
         # To avoid changes to the original aPCE object
-        # MetaModel = deepcopy(MetaModelOrig)
+        MetaModel = deepcopy(MetaModelOrig)
 
         # Old Experimental design
         oldExpDesignX = self.ExpDesign.X
@@ -1755,14 +1722,19 @@ class Engine:
         # Suggestion: Go for the one with the highest LOO error
         # TODO: this is just a patch, need to look at again!
         Scores = list(self.MetaModel.score_dict['b_1'][OutputName].values())
-        ModifiedLOO = [1 - score for score in Scores]
+        #print(Scores)
+        #print(self.MetaModel.score_dict)
+        #print(self.MetaModel.score_dict.values())
+        #print(self.MetaModel.score_dict['b_1'].values())
+        #print(self.MetaModel.score_dict['b_1'][OutputName].values())
+        ModifiedLOO = [1-score for score in Scores]
         outIdx = np.argmax(ModifiedLOO)
 
         # Initialize Phi to save the criterion's values
-        Phi = np.zeros(NCandidate)
+        Phi = np.zeros((NCandidate))
 
         # TODO: also patched here
-        BasisIndices = self.MetaModel.basis_dict['b_1'][OutputName]["y_" + str(outIdx + 1)]
+        BasisIndices = self.MetaModel.basis_dict['b_1'][OutputName]["y_"+str(outIdx+1)]
         P = len(BasisIndices)
 
         # ------ Old Psi ------------
@@ -1781,9 +1753,10 @@ class Engine:
 
             # Information matrix
             PsiTPsi = np.dot(Psi_cand.T, Psi_cand)
-            M = PsiTPsi / (len(oldExpDesignX) + 1)
+            M = PsiTPsi / (len(oldExpDesignX)+1)
 
-            if 1e-12 < np.linalg.cond(PsiTPsi) < 1 / sys.float_info.epsilon:
+            if np.linalg.cond(PsiTPsi) > 1e-12 \
+               and np.linalg.cond(PsiTPsi) < 1 / sys.float_info.epsilon:
                 # faster
                 invM = linalg.solve(M, sparse.eye(PsiTPsi.shape[0]).toarray())
             else:
@@ -1795,7 +1768,7 @@ class Engine:
 
             # D-Opt
             if var.lower() == 'd-opt':
-                Phi[idx] = (np.linalg.det(invM)) ** (1 / P)
+                Phi[idx] = (np.linalg.det(invM)) ** (1/P)
 
             # A-Opt
             elif var.lower() == 'a-opt':
@@ -1806,9 +1779,9 @@ class Engine:
                 Phi[idx] = np.linalg.cond(M)
 
             else:
-                # print(var.lower())
+               # print(var.lower())
                 raise Exception('The optimality criterion you requested has '
-                                'not been implemented yet!')
+                      'not been implemented yet!')
 
         # find an optimal point subset to add to the initial design
         # by minimization of the Phi
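The alphabetic criteria evaluated above only need the (inverse) information matrix; a small self-contained check with a random design matrix, where the A-optimality line is an assumption since it is not part of this hunk:

import numpy as np

rng = np.random.default_rng(1)
Psi = rng.standard_normal((20, 5))        # hypothetical regression (basis) matrix
M = Psi.T @ Psi / Psi.shape[0]            # scaled information matrix
invM = np.linalg.inv(M)
P = M.shape[0]

d_opt = np.linalg.det(invM) ** (1 / P)    # D-optimality, as in the hunk above
a_opt = np.trace(invM)                    # A-optimality (assumed: trace of invM, not shown here)
k_opt = np.linalg.cond(M)                 # K-optimality, as in the hunk above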
@@ -1821,7 +1794,7 @@ class Engine:
 
     # -------------------------------------------------------------------------
     def _normpdf(self, y_hat_pce, std_pce, obs_data, total_sigma2s,
-                 rmse=None):
+                  rmse=None):
         """
         Calculates the Gaussian likelihood for the given y and std, based on the given observations and sigma.
         # TODO: is this understanding correct?
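A minimal sketch of the diagonal-covariance Gaussian likelihood evaluated by this routine, assuming a single output group and invented data:

import numpy as np
from scipy import stats

# Hypothetical surrogate predictions, observations and total error variances
y_hat = np.array([[1.0, 2.1, 2.9],
                  [1.2, 1.9, 3.1]])            # (n_samples, n_obs)
data = np.array([1.1, 2.0, 3.0])
tot_sigma2 = np.array([0.05, 0.05, 0.05])      # measurement + surrogate variance

likelihoods = stats.multivariate_normal.pdf(
    y_hat, mean=data, cov=np.diag(tot_sigma2))  # one likelihood value per sample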
@@ -1853,7 +1826,7 @@ class Engine:
         for idx, out in enumerate(self.out_names):
 
             # (Meta)Model Output
-            # print(y_hat_pce[out])
+           # print(y_hat_pce[out])
             nsamples, nout = y_hat_pce[out].shape
 
             # Prepare data and remove NaN
@@ -1868,9 +1841,9 @@ class Engine:
 
             # Surrogate error if valid dataset is given.
             if rmse is not None:
-                tot_sigma2s += rmse[out] ** 2
+                tot_sigma2s += rmse[out]**2
             else:
-                tot_sigma2s += np.mean(std_pce[out]) ** 2
+                tot_sigma2s += np.mean(std_pce[out])**2
 
             likelihoods *= stats.multivariate_normal.pdf(
                 y_hat_pce[out], data, np.diag(tot_sigma2s),
@@ -1897,8 +1870,8 @@ class Engine:
         # TODO: Evaluate MetaModel on the experimental design and ValidSet
         OutputRS, stdOutputRS = MetaModel.eval_metamodel(samples=samples)
 
-        logLik_data = np.zeros(n_samples)
-        logLik_model = np.zeros(n_samples)
+        logLik_data = np.zeros((n_samples))
+        logLik_model = np.zeros((n_samples))
         # Loop over the outputs
         for idx, out in enumerate(output_names):
 
@@ -1919,6 +1892,7 @@ class Engine:
             covMatrix_data = np.diag(tot_sigma2s)
 
             for i, sample in enumerate(samples):
+
                 # Simulation run
                 y_m = model_outputs[out][i]
 
@@ -1927,22 +1901,22 @@ class Engine:
 
                 # CovMatrix with the surrogate error
                 # covMatrix = np.diag(stdOutputRS[out][i]**2)
-                # covMatrix = np.diag((y_m - y_m_hat) ** 2)
+                covMatrix = np.diag((y_m-y_m_hat)**2)
                 covMatrix = np.diag(
-                    np.mean((model_outputs[out] - OutputRS[out]), axis=0) ** 2
-                )
+                    np.mean((model_outputs[out]-OutputRS[out]), axis=0)**2
+                    )
 
                 # Compute likelihood of output vs data
                 logLik_data[i] += logpdf(
                     y_m_hat, data, covMatrix_data
-                )
+                    )
 
                 # Compute likelihood of output vs surrogate
                 logLik_model[i] += logpdf(y_m_hat, y_m, covMatrix)
 
         # Weight
         logLik_data -= logBME
-        weights = np.exp(logLik_model + logLik_data)
+        weights = np.exp(logLik_model+logLik_data)
 
         return np.log(np.mean(weights))
 
@@ -1968,7 +1942,7 @@ class Engine:
         """
 
         # Initialization
-        newpath = r'Outputs_SeqPosteriorComparison/posterior'
+        newpath = (r'Outputs_SeqPosteriorComparison/posterior')
         os.makedirs(newpath, exist_ok=True)
 
         bound_tuples = self.ExpDesign.bound_tuples
@@ -2010,6 +1984,7 @@ class Engine:
 
         return figPosterior
 
+    
     # -------------------------------------------------------------------------
     def _BME_Calculator(self, obs_data, sigma2Dict, rmse=None):
         """
@@ -2031,8 +2006,7 @@ class Engine:
         
         """
         # Initializations
-        # TODO: this just does not make sense, recheck from old commits
-        if self.valid_likelihoods is not None:
+        if hasattr(self, 'valid_likelihoods'):
             valid_likelihoods = self.valid_likelihoods
         else:
             valid_likelihoods = []
@@ -2040,7 +2014,7 @@ class Engine:
 
         post_snapshot = self.ExpDesign.post_snapshot
         if post_snapshot or valid_likelihoods.shape[0] != 0:
-            newpath = r'Outputs_SeqPosteriorComparison/likelihood_vs_ref'
+            newpath = (r'Outputs_SeqPosteriorComparison/likelihood_vs_ref')
             os.makedirs(newpath, exist_ok=True)
 
         SamplingMethod = 'random'
@@ -2053,7 +2027,7 @@ class Engine:
             # Generate samples for Monte Carlo simulation
             X_MC = self.ExpDesign.generate_samples(
                 MCsize, SamplingMethod
-            )
+                )
 
             # Monte Carlo simulation for the candidate design
             Y_MC, std_MC = self.MetaModel.eval_metamodel(samples=X_MC)
@@ -2062,10 +2036,10 @@ class Engine:
             # simulation results via PCE with candidate design)
             Likelihoods = self._normpdf(
                 Y_MC, std_MC, obs_data, sigma2Dict, rmse
-            )
+                )
 
             # Check the Effective Sample Size (1000<ESS<MCsize)
-            ESS = 1 / np.sum(np.square(Likelihoods / np.sum(Likelihoods)))
+            ESS = 1 / np.sum(np.square(Likelihoods/np.sum(Likelihoods)))
 
             # Enlarge sample size if it doesn't fulfill the criteria
             if (ESS > MCsize) or (ESS < 1):
@@ -2078,7 +2052,7 @@ class Engine:
         unif = np.random.rand(1, MCsize)[0]
 
         # Reject poorly performing prior samples
-        accepted = (Likelihoods / np.max(Likelihoods)) >= unif
+        accepted = (Likelihoods/np.max(Likelihoods)) >= unif
         X_Posterior = X_MC[accepted]
 
         # ------------------------------------------------------------
@@ -2094,17 +2068,16 @@ class Engine:
         postExpLikelihoods = np.mean(np.log(Likelihoods[accepted]))
 
         # Posterior-based expectation of prior densities
-        # TODO: this is commented out, as it is not used again
-        # postExpPrior = np.mean(
-        #     np.log(self.ExpDesign.JDist.pdf(X_Posterior.T))
-        # )
+        postExpPrior = np.mean(
+            np.log(self.ExpDesign.JDist.pdf(X_Posterior.T))
+            )
 
         # Calculate Kullback-Leibler Divergence
         # KLD = np.mean(np.log(Likelihoods[Likelihoods!=0])- logBME)
         KLD = postExpLikelihoods - logBME
 
         # Information Entropy based on Entropy paper Eq. 38
-        # infEntropy = logBME - postExpPrior - postExpLikelihoods
+        infEntropy = logBME - postExpPrior - postExpLikelihoods
 
         # If post_snapshot is True, plot likelihood vs reference
         if post_snapshot or valid_likelihoods:
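The rejection-sampling based estimates computed in this hunk reduce to a few NumPy reductions; a sketch with synthetic likelihoods:

import numpy as np

rng = np.random.default_rng(2)
likelihoods = rng.random(5000) ** 6                            # synthetic prior-sample likelihoods
accepted = (likelihoods / likelihoods.max()) >= rng.random(likelihoods.size)

logBME = np.log(np.nanmean(likelihoods))                       # prior-based log evidence
postExpLikelihoods = np.mean(np.log(likelihoods[accepted]))    # posterior expectation of log-likelihood
KLD = postExpLikelihoods - logBME                              # Kullback-Leibler divergence estimate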
@@ -2113,10 +2086,10 @@ class Engine:
             ref_like = np.log(valid_likelihoods[(valid_likelihoods > 0)])
             est_like = np.log(Likelihoods[Likelihoods > 0])
             distHellinger = hellinger_distance(ref_like, est_like)
-
+            
             idx = len([name for name in os.listdir(newpath) if 'Likelihoods_'
                        in name and os.path.isfile(os.path.join(newpath, name))])
-
+            
             fig, ax = plt.subplots()
             try:
                 sns.kdeplot(np.log(valid_likelihoods[valid_likelihoods > 0]),
@@ -2157,7 +2130,7 @@ class Engine:
                 'n_walkers': 30,
                 'moves': emcee.moves.KDEMove(),
                 'verbose': False
-            }
+                }
 
             # ----- Define the discrepancy model -------
             # TODO: check with Farid if this first line is how it should be
@@ -2169,13 +2142,13 @@ class Engine:
             # # -- (Option B) --
             DiscrepancyOpts = Discrepancy('')
             DiscrepancyOpts.type = 'Gaussian'
-            DiscrepancyOpts.parameters = obs_data ** 2
+            DiscrepancyOpts.parameters = obs_data**2
             BayesOpts.Discrepancy = DiscrepancyOpts
             # Start the calibration/inference
             Bayes_PCE = BayesOpts.create_inference()
             X_Posterior = Bayes_PCE.posterior_df.values
 
-        return logBME, KLD, X_Posterior, Likelihoods, distHellinger
+        return (logBME, KLD, X_Posterior, Likelihoods, distHellinger)
 
     # -------------------------------------------------------------------------
     def _validError(self):
@@ -2207,14 +2180,14 @@ class Engine:
                 sample_weight=None,
                 squared=False)
             # Validation error
-            valid_error[key] = (rms_error[key] ** 2)
+            valid_error[key] = (rms_error[key]**2)
             valid_error[key] /= np.var(valid_model_runs[key], ddof=1, axis=0)
 
             # Print a report table
             print("\n>>>>> Updated Errors of {} <<<<<".format(key))
             print("\nIndex  |  RMSE   |  Validation Error")
-            print('-' * 35)
-            print('\n'.join(f'{i + 1}  |  {k:.3e}  |  {j:.3e}' for i, (k, j)
+            print('-'*35)
+            print('\n'.join(f'{i+1}  |  {k:.3e}  |  {j:.3e}' for i, (k, j)
                             in enumerate(zip(rms_error[key],
                                              valid_error[key]))))
 
@@ -2240,12 +2213,13 @@ class Engine:
 
         # Compute the root mean squared error
         for output in self.out_names:
+
             # Compute the error between mean and std of MetaModel and OrigModel
             RMSE_Mean = mean_squared_error(
                 self.Model.mc_reference['mean'], pce_means[output], squared=False
-            )
+                )
             RMSE_std = mean_squared_error(
                 self.Model.mc_reference['std'], pce_stds[output], squared=False
-            )
+                )
 
         return RMSE_Mean, RMSE_std
diff --git a/src/bayesvalidrox/surrogate_models/exp_designs.py b/src/bayesvalidrox/surrogate_models/exp_designs.py
index ce1745903..fa03fe17d 100644
--- a/src/bayesvalidrox/surrogate_models/exp_designs.py
+++ b/src/bayesvalidrox/surrogate_models/exp_designs.py
@@ -4,17 +4,18 @@
 Experimental design with associated sampling methods
 """
 
-import itertools
+import numpy as np
 import math
-
+import itertools
 import chaospy
+import scipy.stats as st
+from tqdm import tqdm
 import h5py
-import numpy as np
+import os
 
 from .apoly_construction import apoly_construction
 from .input_space import InputSpace
 
-
 # -------------------------------------------------------------------------
 def check_ranges(theta, ranges):
     """
@@ -25,12 +26,12 @@ def check_ranges(theta, ranges):
     theta : array
         Proposed parameter set.
     ranges : nested list
-        The parameter ranges.
+        List of the parameter ranges.
 
     Returns
     -------
     c : bool
-        If it lies in the given range, it returns True else False.
+        If it lies in the given range, it return True else False.
 
     """
     c = True
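For orientation, a tiny usage sketch of this helper; the values are invented and the import path is taken from the file header of this diff:

from bayesvalidrox.surrogate_models.exp_designs import check_ranges

ranges = [(0.0, 1.0), (2.0, 4.0)]              # hypothetical parameter ranges
assert check_ranges([0.5, 3.0], ranges)        # lies inside both ranges
assert not check_ranges([1.5, 3.0], ranges)    # first component outside its range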
@@ -51,7 +52,7 @@ class ExpDesigns(InputSpace):
 
     Attributes
     ----------
-    input_object : obj
+    Input : obj
         Input object containing the parameter marginals, i.e. name,
         distribution type and distribution parameters or available raw data.
     meta_Model_type : str
@@ -142,17 +143,15 @@ class ExpDesigns(InputSpace):
     - K-Opt (K-Optimality)
     """
 
-    def __init__(self, input_object, meta_Model_type='pce', sampling_method='random', hdf5_file=None,
-                 n_new_samples=1, n_max_samples=None, mod_LOO_threshold=1e-16, tradeoff_scheme=None, n_canddidate=1,
-                 explore_method='random', exploit_method='Space-filling', util_func='Space-filling', n_cand_groups=4,
-                 n_replication=1, post_snapshot=False, step_snapshot=1, max_a_post=None, adapt_verbose=False,
-                 max_func_itr=1):
+    def __init__(self, Input, meta_Model_type='pce',
+                 sampling_method='random', hdf5_file=None,
+                 n_new_samples=1, n_max_samples=None, mod_LOO_threshold=1e-16,
+                 tradeoff_scheme=None, n_canddidate=1, explore_method='random',
+                 exploit_method='Space-filling', util_func='Space-filling',
+                 n_cand_groups=4, n_replication=1, post_snapshot=False,
+                 step_snapshot=1, max_a_post=[], adapt_verbose=False, max_func_itr=1):
 
-        super().__init__(input_object, meta_Model_type)
-        if max_a_post is None:
-            max_a_post = []
-
-        self.InputObj = input_object
+        self.InputObj = Input
         self.meta_Model_type = meta_Model_type
         self.sampling_method = sampling_method
         self.hdf5_file = hdf5_file
@@ -171,20 +170,17 @@ class ExpDesigns(InputSpace):
         self.max_a_post = max_a_post
         self.adapt_verbose = adapt_verbose
         self.max_func_itr = max_func_itr
-
+        
         # Other 
         self.apce = None
-        self.n_init_samples = None
-        self.n_samples = None
         self.ndim = None
-        self.X = None
-        self.Y = None
-
+        
         # Init 
         self.check_valid_inputs()
-
+        
     # -------------------------------------------------------------------------
-    def generate_samples(self, n_samples, sampling_method='random'):
+    def generate_samples(self, n_samples, sampling_method='random',
+                         transform=False):
         """
         Generates samples with given sampling method
 
@@ -194,6 +190,9 @@ class ExpDesigns(InputSpace):
             Number of requested samples.
         sampling_method : str, optional
             Sampling method. The default is `'random'`.
+        transform : bool, optional
+            Transformation via an isoprobabilistic transformation method. The
+            default is `False`.
 
         Returns
         -------
@@ -204,14 +203,17 @@ class ExpDesigns(InputSpace):
         try:
             samples = chaospy.generate_samples(
                 int(n_samples), domain=self.origJDist, rule=sampling_method
-            )
+                )
         except:
             samples = self.random_sampler(int(n_samples)).T
 
         return samples.T
 
+
+            
     # -------------------------------------------------------------------------
-    def generate_ED(self, n_samples, max_pce_deg=None):
+    def generate_ED(self, n_samples, transform=False,
+                    max_pce_deg=None):
         """
         Generates experimental designs (training set) with the given method.
 
@@ -219,6 +221,10 @@ class ExpDesigns(InputSpace):
         ----------
         n_samples : int
             Number of requested training points.
+        sampling_method : str, optional
+            Sampling method. The default is `'random'`.
+        transform : bool, optional
+            Isoprobabilistic transformation. The default is `False`.
         max_pce_deg : int, optional
             Maximum PCE polynomial degree. The default is `None`.
             
@@ -227,23 +233,21 @@ class ExpDesigns(InputSpace):
         None
 
         """
-        if n_samples < 0:
+        if n_samples <0:
             raise ValueError('A negative number of samples cannot be created. Please provide positive n_samples')
         n_samples = int(n_samples)
-
-        if self.n_init_samples is None:
+        
+        if not hasattr(self, 'n_init_samples'):
             self.n_init_samples = n_samples
 
         # Generate the samples based on requested method
         self.init_param_space(max_pce_deg)
 
-        samples = None
         sampling_method = self.sampling_method
         # Pass user-defined samples as ED
         if sampling_method == 'user':
-            if self.X is None:
-                raise AttributeError('User-defined sampling cannot proceed as no samples provided. Please add them to '
-                                     'this class as attribute X')
+            if not hasattr(self, 'X'):
+                raise AttributeError('User-defined sampling cannot proceed as no samples provided. Please add them to this class as attribute X')
             if not self.X.ndim == 2:
                 raise AttributeError('The provided samples should have 2 dimensions')
             samples = self.X
@@ -275,7 +279,7 @@ class ExpDesigns(InputSpace):
                                                rule=sampling_method).T
 
         self.X = samples
-
+            
     def read_from_file(self, out_names):
         """
         Reads in the ExpDesign from a provided h5py file and saves the results.
@@ -290,7 +294,7 @@ class ExpDesigns(InputSpace):
         None.
 
         """
-        if self.hdf5_file is None:
+        if self.hdf5_file == None:
             raise AttributeError('ExpDesign cannot be read in, please provide hdf5 file first')
 
         # Read hdf5 file
@@ -327,9 +331,11 @@ class ExpDesigns(InputSpace):
         f.close()
         print(f'Experimental Design is read in from file {self.hdf5_file}')
         print('')
+        
+    
 
     # -------------------------------------------------------------------------
-    def random_sampler(self, n_samples, max_deg=None):
+    def random_sampler(self, n_samples, max_deg = None):
         """
         Samples the given raw data randomly.
 
@@ -349,10 +355,10 @@ class ExpDesigns(InputSpace):
             The sampling locations in the input space.
 
         """
-        if self.raw_data is None:
+        if not hasattr(self, 'raw_data'):
             self.init_param_space(max_deg)
         else:
-            if np.array(self.raw_data).ndim != 2:
+            if np.array(self.raw_data).ndim !=2:
                 raise AttributeError('The given raw data for sampling should have two dimensions')
         samples = np.zeros((n_samples, self.ndim))
         sample_size = self.raw_data.shape[1]
@@ -365,18 +371,10 @@ class ExpDesigns(InputSpace):
                 # store the raw data with given random indices
                 samples[:, pa_idx] = self.raw_data[pa_idx, rand_idx]
         else:
-            if self.JDist is None:
-                raise AttributeError('Sampling cannot proceed, build ExpDesign with max_deg != 0 to create JDist!')
             try:
-                # Use resample if JDist is of type gaussian_kde
                 samples = self.JDist.resample(int(n_samples)).T
             except AttributeError:
-                # Use sample if JDist is of type chaospy.J
                 samples = self.JDist.sample(int(n_samples)).T
-            # If there is only one input transform the samples
-            if self.ndim == 1:
-                samples = np.swapaxes(np.atleast_2d(samples), 0, 1)
-
             # Check if all samples are in the bound_tuples
             for idx, param_set in enumerate(samples):
                 if not check_ranges(param_set, self.bound_tuples):
@@ -386,7 +384,7 @@ class ExpDesigns(InputSpace):
                     except:
                         proposed_sample = self.JDist.resample(1).T[0]
                     while not check_ranges(proposed_sample,
-                                           self.bound_tuples):
+                                                 self.bound_tuples):
                         try:
                             proposed_sample = chaospy.generate_samples(
                                 1, domain=self.JDist, rule='random').T[0]
@@ -416,67 +414,49 @@ class ExpDesigns(InputSpace):
             Collocation points.
 
         """
-
-        if self.raw_data is None:
+        
+        if not hasattr(self, 'raw_data'):
             self.init_param_space(max_deg)
 
         raw_data = self.raw_data
 
         # Guess the closest degree to self.n_samples
         def M_uptoMax(deg):
-            """
-            ??
-            Parameters
-            ----------
-            deg : int
-                Degree.
-
-            Returns
-            -------
-            list of ..?
-            """
             result = []
-            for d in range(1, deg + 1):
-                result.append(math.factorial(self.ndim + d) //
+            for d in range(1, deg+1):
+                result.append(math.factorial(self.ndim+d) //
                               (math.factorial(self.ndim) * math.factorial(d)))
             return np.array(result)
+        #print(M_uptoMax(max_deg))
+        #print(np.where(M_uptoMax(max_deg) > n_samples)[0])
 
         guess_Deg = np.where(M_uptoMax(max_deg) > n_samples)[0][0]
 
-        c_points = np.zeros((guess_Deg + 1, self.ndim))
+        c_points = np.zeros((guess_Deg+1, self.ndim))
 
         def PolynomialPa(parIdx):
-            """
-            ???
-            Parameters
-            ----------
-            parIdx
-
-            Returns
-            -------
-
-            """
             return apoly_construction(self.raw_data[parIdx], max_deg)
 
         for i in range(self.ndim):
-            poly_coeffs = PolynomialPa(i)[guess_Deg + 1][::-1]
+            poly_coeffs = PolynomialPa(i)[guess_Deg+1][::-1]
             c_points[:, i] = np.trim_zeros(np.roots(poly_coeffs))
 
         #  Construction of optimal integration points
-        Prod = itertools.product(np.arange(1, guess_Deg + 2), repeat=self.ndim)
+        Prod = itertools.product(np.arange(1, guess_Deg+2), repeat=self.ndim)
         sort_dig_unique_combos = np.array(list(filter(lambda x: x, Prod)))
 
         # Ranking relative to the mean
-        Temp = np.empty(shape=[0, guess_Deg + 1])
+        Temp = np.empty(shape=[0, guess_Deg+1])
         for j in range(self.ndim):
-            s = abs(c_points[:, j] - np.mean(raw_data[j]))
+            s = abs(c_points[:, j]-np.mean(raw_data[j]))
             Temp = np.append(Temp, [s], axis=0)
         temp = Temp.T
 
         index_CP = np.sort(temp, axis=0)
-        sort_cpoints = np.empty((0, guess_Deg + 1))
+        sort_cpoints = np.empty((0, guess_Deg+1))
 
         for j in range(self.ndim):
+            #print(index_CP[:, j])
             sort_cp = c_points[index_CP[:, j], j]
             sort_cpoints = np.vstack((sort_cpoints, sort_cp))
 
@@ -484,9 +464,8 @@ class ExpDesigns(InputSpace):
         sort_unique_combos = np.empty(shape=[0, self.ndim])
         for i in range(len(sort_dig_unique_combos)):
             sort_un_comb = []
-            sort_uni_comb = None
             for j in range(self.ndim):
-                SortUC = sort_cpoints[j, sort_dig_unique_combos[i, j] - 1]
+                SortUC = sort_cpoints[j, sort_dig_unique_combos[i, j]-1]
                 sort_un_comb.append(SortUC)
                 sort_uni_comb = np.asarray(sort_un_comb)
             sort_unique_combos = np.vstack((sort_unique_combos, sort_uni_comb))
diff --git a/src/bayesvalidrox/surrogate_models/exploration.py b/src/bayesvalidrox/surrogate_models/exploration.py
index 67decae2b..6abb652f1 100644
--- a/src/bayesvalidrox/surrogate_models/exploration.py
+++ b/src/bayesvalidrox/surrogate_models/exploration.py
@@ -33,7 +33,6 @@ class Exploration:
 
     def __init__(self, ExpDesign, n_candidate,
                  mc_criterion='mc-intersite-proj-th'):
-        self.closestPoints = None
         self.ExpDesign = ExpDesign
         self.n_candidate = n_candidate
         self.mc_criterion = mc_criterion
diff --git a/src/bayesvalidrox/surrogate_models/input_space.py b/src/bayesvalidrox/surrogate_models/input_space.py
index c534d34b2..4e010d66f 100644
--- a/src/bayesvalidrox/surrogate_models/input_space.py
+++ b/src/bayesvalidrox/surrogate_models/input_space.py
@@ -9,7 +9,6 @@ import chaospy
 import scipy.stats as st
 
 
-# noinspection SpellCheckingInspection
 class InputSpace:
     """
     This class generates the input space for the metamodel from the
@@ -25,32 +24,24 @@ class InputSpace:
 
     """
 
-    def __init__(self, input_object, meta_Model_type='pce'):
-        self.InputObj = input_object
+    def __init__(self, Input, meta_Model_type='pce'):
+        self.InputObj = Input
         self.meta_Model_type = meta_Model_type
-
+        
         # Other 
         self.apce = None
-        self.bound_tuples = None
-        self.input_data_given = None
-        self.JDist = None
-        self.MCSize = None
         self.ndim = None
-        self.origJDist = None
-        self.par_names = None
-        self.poly_types = None
-        self.prior_space = None
-        self.raw_data = None
-
+        
         # Init 
         self.check_valid_inputs()
-
-    def check_valid_inputs(self) -> None:
+        
+        
+    def check_valid_inputs(self)-> None:
         """
         Check if the given InputObj is valid to use for further calculations:
-        1) Has some Marginals
-        2) The Marginals have valid priors
-        3) All Marginals given as the same type (samples vs dist)
+            Has some Marginals
+            Marginals have valid priors
+            All Marginals given as the same type (samples vs dist)
 
         Returns
         -------
@@ -59,7 +50,7 @@ class InputSpace:
         """
         Inputs = self.InputObj
         self.ndim = len(Inputs.Marginals)
-
+        
         # Check if PCE or aPCE metamodel is selected.
         # TODO: test also for 'pce'??
         if self.meta_Model_type.lower() == 'apce':
@@ -68,22 +59,26 @@ class InputSpace:
             self.apce = False
 
         # check if marginals given 
-        if not self.ndim >= 1:
+        if not self.ndim >=1:
             raise AssertionError('Cannot build distributions if no marginals are given')
-
+            
         # check that each marginal is valid
         for marginals in Inputs.Marginals:
             if len(marginals.input_data) == 0:
-                if marginals.dist_type is None:
+                if marginals.dist_type == None:
                     raise AssertionError('Not all marginals were provided priors')
-            if np.array(marginals.input_data).shape[0] and (marginals.dist_type is not None):
+                    break
+            if np.array(marginals.input_data).shape[0] and (marginals.dist_type != None):
                 raise AssertionError('Both samples and distribution type are given. Please choose only one.')
-
+                break
+                
         # Check if input is given as dist or input_data.
         self.input_data_given = -1
         for marg in Inputs.Marginals:
+            #print(self.input_data_given)
             size = np.array(marg.input_data).shape[0]
-            if size and abs(self.input_data_given) != 1:
+            #print(f'Size: {size}')
+            if size and abs(self.input_data_given) !=1:
                 self.input_data_given = 2
                 break
             if (not size) and self.input_data_given > 0:
@@ -93,10 +88,11 @@ class InputSpace:
                 self.input_data_given = 0
             if size:
                 self.input_data_given = 1
-
+                
         if self.input_data_given == 2:
             raise AssertionError('Distributions cannot be built as the priors have different types')
-
+            
+    
         # Get the bounds if input_data are directly defined by user:
         if self.input_data_given:
             for i in range(self.ndim):
@@ -104,6 +100,8 @@ class InputSpace:
                 up_bound = np.max(Inputs.Marginals[i].input_data)
                 Inputs.Marginals[i].parameters = [low_bound, up_bound]
 
+  
+
     # -------------------------------------------------------------------------
     def init_param_space(self, max_deg=None):
         """
@@ -114,7 +112,7 @@ class InputSpace:
         max_deg : int, optional
             Maximum degree. The default is `None`.
 
-        Returns
+        Creates
         -------
         raw_data : array of shape (n_params, n_samples)
             Raw data.
@@ -124,7 +122,7 @@ class InputSpace:
         """
         # Recheck all before running!
         self.check_valid_inputs()
-
+        
         Inputs = self.InputObj
         ndim = self.ndim
         rosenblatt_flag = Inputs.Rosenblatt
@@ -194,17 +192,16 @@ class InputSpace:
         orig_space_dist : object
             A chaospy JDist object or a gaussian_kde object.
         poly_types : list
-            A list of polynomial types for the parameters.
+            List of polynomial types for the parameters.
 
         """
         Inputs = self.InputObj
-
+        
         all_data = []
         all_dist_types = []
         orig_joints = []
         poly_types = []
-        params = None
-
+        
         for parIdx in range(self.ndim):
 
             if Inputs.Marginals[parIdx].dist_type is None:
@@ -225,27 +222,27 @@ class InputSpace:
 
             elif 'unif' in dist_type.lower():
                 polytype = 'legendre'
-                if not np.array(params).shape[0] >= 2:
+                if not np.array(params).shape[0]>=2:
                     raise AssertionError('Distribution has too few parameters!')
                 dist = chaospy.Uniform(lower=params[0], upper=params[1])
 
             elif 'norm' in dist_type.lower() and \
-                    'log' not in dist_type.lower():
-                if not np.array(params).shape[0] >= 2:
+                 'log' not in dist_type.lower():
+                if not np.array(params).shape[0]>=2:
                     raise AssertionError('Distribution has too few parameters!')
                 polytype = 'hermite'
                 dist = chaospy.Normal(mu=params[0], sigma=params[1])
 
             elif 'gamma' in dist_type.lower():
                 polytype = 'laguerre'
-                if not np.array(params).shape[0] >= 3:
+                if not np.array(params).shape[0]>=3:
                     raise AssertionError('Distribution has too few parameters!')
                 dist = chaospy.Gamma(shape=params[0],
                                      scale=params[1],
                                      shift=params[2])
 
             elif 'beta' in dist_type.lower():
-                if not np.array(params).shape[0] >= 4:
+                if not np.array(params).shape[0]>=4:
                     raise AssertionError('Distribution has too few parameters!')
                 polytype = 'jacobi'
                 dist = chaospy.Beta(alpha=params[0], beta=params[1],
@@ -253,29 +250,29 @@ class InputSpace:
 
             elif 'lognorm' in dist_type.lower():
                 polytype = 'hermite'
-                if not np.array(params).shape[0] >= 2:
+                if not np.array(params).shape[0]>=2:
                     raise AssertionError('Distribution has too few parameters!')
-                mu = np.log(params[0] ** 2 / np.sqrt(params[0] ** 2 + params[1] ** 2))
-                sigma = np.sqrt(np.log(1 + params[1] ** 2 / params[0] ** 2))
+                mu = np.log(params[0]**2/np.sqrt(params[0]**2 + params[1]**2))
+                sigma = np.sqrt(np.log(1 + params[1]**2 / params[0]**2))
                 dist = chaospy.LogNormal(mu, sigma)
                 # dist = chaospy.LogNormal(mu=params[0], sigma=params[1])
 
             elif 'expon' in dist_type.lower():
                 polytype = 'exponential'
-                if not np.array(params).shape[0] >= 2:
+                if not np.array(params).shape[0]>=2:
                     raise AssertionError('Distribution has too few parameters!')
                 dist = chaospy.Exponential(scale=params[0], shift=params[1])
 
             elif 'weibull' in dist_type.lower():
                 polytype = 'weibull'
-                if not np.array(params).shape[0] >= 3:
+                if not np.array(params).shape[0]>=3:
                     raise AssertionError('Distribution has too few parameters!')
                 dist = chaospy.Weibull(shape=params[0], scale=params[1],
                                        shift=params[2])
 
             else:
                 message = (f"DistType {dist_type} for parameter"
-                           f"{parIdx + 1} is not available.")
+                           f"{parIdx+1} is not available.")
                 raise ValueError(message)
 
             if self.input_data_given or self.apce:
@@ -314,8 +311,6 @@ class InputSpace:
         ----------
         X : array of shape (n_samples,n_params)
             Samples to be transformed.
-        params : list
-            Parameters for laguerre/gamma-type distribution.
         method : string
             If transformation method is 'user' transform X, else just pass X.
 
@@ -326,18 +321,17 @@ class InputSpace:
 
         """
         # Check for built JDist
-        if self.JDist is None:
+        if not hasattr(self, 'JDist'):
             raise AttributeError('Call function init_param_space first to create JDist')
-
+            
         # Check if X is 2d
         if X.ndim != 2:
             raise AttributeError('X should have two dimensions')
-
+            
         # Check if size of X matches Marginals
-        if X.shape[1] != self.ndim:
-            raise AttributeError(
-                'The second dimension of X should be the same size as the number of marginals in the InputObj')
-
+        if X.shape[1]!= self.ndim:
+            raise AttributeError('The second dimension of X should be the same size as the number of marginals in the InputObj')
+        
         if self.InputObj.Rosenblatt:
             self.origJDist, _ = self.build_polytypes(False)
             if method == 'user':
@@ -360,8 +354,8 @@ class InputSpace:
             if None in disttypes or self.input_data_given or self.apce:
                 return X
 
-            cdfx = np.zeros(X.shape)
-            tr_X = np.zeros(X.shape)
+            cdfx = np.zeros((X.shape))
+            tr_X = np.zeros((X.shape))
 
             for par_i in range(n_params):
 
@@ -376,12 +370,11 @@ class InputSpace:
 
                 # Extract the parameters of the transformation space based on
                 # polyType
-                inv_cdf = None
                 if polytype == 'legendre' or disttype == 'uniform':
                     # Generate Y_Dists based
                     params_Y = [-1, 1]
                     dist_Y = st.uniform(loc=params_Y[0],
-                                        scale=params_Y[1] - params_Y[0])
+                                        scale=params_Y[1]-params_Y[0])
                     inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
 
                 elif polytype == 'hermite' or disttype == 'norm':
@@ -390,11 +383,9 @@ class InputSpace:
                     inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
 
                 elif polytype == 'laguerre' or disttype == 'gamma':
-                    if params is None:
+                    if params == None:
                         raise AttributeError('Additional parameters have to be set for the gamma distribution!')
                     params_Y = [1, params[1]]
-
-                    # TOOD: update the call to the gamma function, seems like source code has been changed!
                     dist_Y = st.gamma(loc=params_Y[0], scale=params_Y[1])
                     inv_cdf = np.vectorize(lambda x: dist_Y.ppf(x))
 
diff --git a/src/bayesvalidrox/surrogate_models/inputs.py b/src/bayesvalidrox/surrogate_models/inputs.py
index 40ae36337..094e1066f 100644
--- a/src/bayesvalidrox/surrogate_models/inputs.py
+++ b/src/bayesvalidrox/surrogate_models/inputs.py
@@ -4,7 +4,6 @@
 Inputs and related marginal distributions
 """
 
-
 class Input:
     """
     A class to define the uncertain input parameters.
@@ -21,18 +20,17 @@ class Input:
     -------
     Marginals can be defined as follows:
 
-    >>> inputs = Inputs()
-    >>> inputs.add_marginals()
-    >>> inputs.Marginals[0].name = 'X_1'
-    >>> inputs.Marginals[0].dist_type = 'uniform'
-    >>> inputs.Marginals[0].parameters = [-5, 5]
+    >>> Inputs.add_marginals()
+    >>> Inputs.Marginals[0].name = 'X_1'
+    >>> Inputs.Marginals[0].dist_type = 'uniform'
+    >>> Inputs.Marginals[0].parameters = [-5, 5]
 
     If no common data is available, the input data can be given
     as follows:
 
-    >>> inputs.add_marginals()
-    >>> inputs.Marginals[0].name = 'X_1'
-    >>> inputs.Marginals[0].input_data = [0,0,1,0]
+    >>> Inputs.add_marginals()
+    >>> Inputs.Marginals[0].name = 'X_1'
+    >>> Inputs.Marginals[0].input_data = input_data
     """
     poly_coeffs_flag = True
 
@@ -65,12 +63,12 @@ class Marginal:
     dist_type : string
         Name of the distribution. The default is `None`.
     parameters : list
-        Parameters corresponding to the distribution type. The
+        List of the parameters corresponding to the distribution type. The
         default is `None`.
     input_data : array
         Available input data. The default is `[]`.
     moments : list
-        Moments of the distribution. The default is `None`.
+        List of the moments.
     """
 
     def __init__(self):
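Putting the two docstrings above together, a minimal usage sketch for defining a marginal on an Input instance; the import path follows this file, and the instance-based calls are an assumption based on the class layout:

    from bayesvalidrox.surrogate_models.inputs import Input

    inputs = Input()
    inputs.add_marginals()                     # append a new Marginal
    inputs.Marginals[0].name = 'X_1'
    inputs.Marginals[0].dist_type = 'uniform'
    inputs.Marginals[0].parameters = [-5, 5]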
diff --git a/src/bayesvalidrox/surrogate_models/reg_fast_ard.py b/src/bayesvalidrox/surrogate_models/reg_fast_ard.py
index fdd0ee747..e6883a3ed 100755
--- a/src/bayesvalidrox/surrogate_models/reg_fast_ard.py
+++ b/src/bayesvalidrox/surrogate_models/reg_fast_ard.py
@@ -236,7 +236,7 @@ class RegressionFastARD(LinearModel, RegressorMixin):
             self.var_y = False
 
         A = np.PINF * np.ones(n_features)
-        active = np.zeros(n_features, dtype=bool)
+        active = np.zeros(n_features, dtype=np.bool)
 
         if self.start is not None and not hasattr(self, 'active_'):
             start = self.start
diff --git a/src/bayesvalidrox/surrogate_models/surrogate_models.py b/src/bayesvalidrox/surrogate_models/surrogate_models.py
index d8a589dde..ca902f26b 100644
--- a/src/bayesvalidrox/surrogate_models/surrogate_models.py
+++ b/src/bayesvalidrox/surrogate_models/surrogate_models.py
@@ -4,284 +4,38 @@
 Implementation of metamodel as either PC, aPC or GPE
 """
 
-import copy
-import os
 import warnings
-
-import matplotlib.pyplot as plt
 import numpy as np
+import math
+import h5py
+import matplotlib.pyplot as plt
+from sklearn.preprocessing import MinMaxScaler
 import scipy as sp
-import sklearn.gaussian_process.kernels as kernels
-import sklearn.linear_model as lm
-from joblib import Parallel, delayed
-from scipy.optimize import minimize, NonlinearConstraint
+from scipy.optimize import minimize, NonlinearConstraint, LinearConstraint
+from tqdm import tqdm
 from sklearn.decomposition import PCA as sklearnPCA
+import sklearn.linear_model as lm
 from sklearn.gaussian_process import GaussianProcessRegressor
-from sklearn.preprocessing import MinMaxScaler
-from tqdm import tqdm
+import sklearn.gaussian_process.kernels as kernels
+import os
+from joblib import Parallel, delayed
+import copy
 
-from .apoly_construction import apoly_construction
-from .bayes_linear import VBLinearRegression, EBLinearRegression
-from .eval_rec_rule import eval_univ_basis
-from .glexindex import glexindex
 from .input_space import InputSpace
-from .orthogonal_matching_pursuit import OrthogonalMatchingPursuit
+from .glexindex import glexindex
+from .eval_rec_rule import eval_univ_basis
 from .reg_fast_ard import RegressionFastARD
 from .reg_fast_laplace import RegressionFastLaplace
-
+from .orthogonal_matching_pursuit import OrthogonalMatchingPursuit
+from .bayes_linear import VBLinearRegression, EBLinearRegression
+from .apoly_construction import apoly_construction
 warnings.filterwarnings("ignore")
 # Load the mplstyle
-# noinspection SpellCheckingInspection
 plt.style.use(os.path.join(os.path.split(__file__)[0],
                            '../', 'bayesvalidrox.mplstyle'))
 
 
-# noinspection SpellCheckingInspection
-def corr_loocv_error(clf, psi, coeffs, y):
-    """
-    Calculates the corrected LOO error for regression on regressor
-    matrix `psi` that generated the coefficients based on [1] and [2].
-
-    [1] Blatman, G., 2009. Adaptive sparse polynomial chaos expansions for
-        uncertainty propagation and sensitivity analysis (Doctoral
-        dissertation, Clermont-Ferrand 2).
-
-    [2] Blatman, G. and Sudret, B., 2011. Adaptive sparse polynomial chaos
-        expansion based on least angle regression. Journal of computational
-        Physics, 230(6), pp.2345-2367.
-
-    Parameters
-    ----------
-    clf : object
-        Fitted estimator.
-    psi : array of shape (n_samples, n_features)
-        The multivariate orthogonal polynomials (regressor).
-    coeffs : array-like of shape (n_features,)
-        Estimated cofficients.
-    y : array of shape (n_samples,)
-        Target values.
-
-    Returns
-    -------
-    R_2 : float
-        LOOCV Validation score (1-LOOCV erro).
-    residual : array of shape (n_samples,)
-        Residual values (y - predicted targets).
-
-    """
-    psi = np.array(psi, dtype=float)
-
-    # Create PSI_Sparse by removing redundent terms
-    nnz_idx = np.nonzero(coeffs)[0]
-    if len(nnz_idx) == 0:
-        nnz_idx = [0]
-    psi_sparse = psi[:, nnz_idx]
-
-    # NrCoeffs of aPCEs
-    P = len(nnz_idx)
-    # NrEvaluation (Size of experimental design)
-    N = psi.shape[0]
-
-    # Build the projection matrix
-    PsiTPsi = np.dot(psi_sparse.T, psi_sparse)
-
-    if np.linalg.cond(PsiTPsi) > 1e-12:  # and \
-        # np.linalg.cond(PsiTPsi) < 1/sys.float_info.epsilon:
-        # faster
-        try:
-            M = sp.linalg.solve(PsiTPsi,
-                                sp.sparse.eye(PsiTPsi.shape[0]).toarray())
-        except:
-            raise AttributeError(
-                'There are too few samples for the corrected loo-cv error. Fit surrogate on at least as many '
-                'samples as parameters to use this')
-    else:
-        # stabler
-        M = np.linalg.pinv(PsiTPsi)
-
-    # h factor (the full matrix is not calculated explicitly,
-    # only the trace is, to save memory)
-    PsiM = np.dot(psi_sparse, M)
-
-    h = np.sum(np.multiply(PsiM, psi_sparse), axis=1, dtype=np.longdouble)  # float128)
-
-    # ------ Calculate Error Loocv for each measurement point ----
-    # Residuals
-    try:
-        residual = clf.predict(psi) - y
-    except:
-        residual = np.dot(psi, coeffs) - y
-
-    # Variance
-    var_y = np.var(y)
-
-    if var_y == 0:
-        # norm_emp_error = 0
-        loo_error = 0
-        LCerror = np.zeros(y.shape)
-        return 1 - loo_error, LCerror
-    else:
-        # norm_emp_error = np.mean(residual ** 2) / var_y
-
-        # LCerror = np.divide(residual, (1-h))
-        LCerror = residual / (1 - h)
-        loo_error = np.mean(np.square(LCerror)) / var_y
-        # if there are NaNs, just return an infinite LOO error (this
-        # happens, e.g., when a strongly underdetermined problem is solved)
-        if np.isnan(loo_error):
-            loo_error = np.inf
-
-    # Corrected Error for over-determined system
-    tr_M = np.trace(M)
-    if tr_M < 0 or abs(tr_M) > 1e6:
-        tr_M = np.trace(np.linalg.pinv(np.dot(psi.T, psi)))
-
-    # Over-determined system of Equation
-    if N > P:
-        T_factor = N / (N - P) * (1 + tr_M)
-
-    # Under-determined system of Equation
-    else:
-        T_factor = np.inf
-
-    corrected_loo_error = loo_error * T_factor
-
-    R_2 = 1 - corrected_loo_error
-
-    return R_2, LCerror
-
-
-def create_psi(basis_indices, univ_p_val):
-    """
-    This function assemble the design matrix Psi from the given basis index
-    set INDICES and the univariate polynomial evaluations univ_p_val.
-
-    Parameters
-    ----------
-    basis_indices : array of shape (n_terms, n_params)
-        Multi-indices of multivariate polynomials.
-    univ_p_val : array of (n_samples, n_params, n_max+1)
-        All univariate regressors up to `n_max`.
-
-    Raises
-    ------
-    ValueError
-        n_terms in arguments do not match.
-
-    Returns
-    -------
-    psi : array of shape (n_samples, n_terms)
-        Multivariate regressors.
-
-    """
-    # Check if BasisIndices is a sparse matrix
-    sparsity = sp.sparse.issparse(basis_indices)
-    if sparsity:
-        basis_indices = basis_indices.toarray()
-
-    # Initialization and consistency checks
-    # number of input variables
-    n_params = univ_p_val.shape[1]
-
-    # Size of the experimental design
-    n_samples = univ_p_val.shape[0]
-
-    # number of basis terms
-    n_terms = basis_indices.shape[0]
-
-    # check that the variables have consistent sizes
-    if n_params != basis_indices.shape[1]:
-        raise ValueError(
-            f"The shapes of basis_indices ({basis_indices.shape[1]}) and "
-            f"univ_p_val ({n_params}) don't match!!"
-        )
-
-    # Preallocate the Psi matrix for performance
-    psi = np.ones((n_samples, n_terms))
-    # Assemble the Psi matrix
-    for m in range(basis_indices.shape[1]):
-        aa = np.where(basis_indices[:, m] > 0)[0]
-        try:
-            basisIdx = basis_indices[aa, m]
-            bb = univ_p_val[:, m, basisIdx].reshape(psi[:, aa].shape)
-            psi[:, aa] = np.multiply(psi[:, aa], bb)
-        except ValueError as err:
-            raise err
-    return psi
-
-
-def gaussian_process_emulator(X, y, nug_term=None, autoSelect=False,
-                              varIdx=None):
-    """
-    Fits a Gaussian Process Emulator to the target given the training
-     points.
-
-    Parameters
-    ----------
-    X : array of shape (n_samples, n_params)
-        Training points.
-    y : array of shape (n_samples,)
-        Target values.
-    nug_term : float, optional
-        Nugget term. The default is None, i.e. variance of y.
-    autoSelect : bool, optional
-        Loop over some kernels and select the best. The default is False.
-    varIdx : int, optional
-        The index number. The default is None.
-
-    Returns
-    -------
-    gp : object
-        Fitted estimator.
-
-    """
-
-    nug_term = nug_term if nug_term else np.var(y)
-
-    Kernels = [nug_term * kernels.RBF(length_scale=1.0,
-                                      length_scale_bounds=(1e-25, 1e15)),
-               nug_term * kernels.RationalQuadratic(length_scale=0.2,
-                                                    alpha=1.0),
-               nug_term * kernels.Matern(length_scale=1.0,
-                                         length_scale_bounds=(1e-15, 1e5),
-                                         nu=1.5)]
-
-    # Automatic selection of the kernel
-    if autoSelect:
-        gp = {}
-        BME = []
-        for i, kernel in enumerate(Kernels):
-            gp[i] = GaussianProcessRegressor(kernel=kernel,
-                                             n_restarts_optimizer=3,
-                                             normalize_y=False)
-
-            # Fit to data using Maximum Likelihood Estimation
-            gp[i].fit(X, y)
-
-            # Store the MLE as BME score
-            BME.append(gp[i].log_marginal_likelihood())
-
-        gp = gp[np.argmax(BME)]
-
-    else:
-        gp = GaussianProcessRegressor(kernel=Kernels[0],
-                                      n_restarts_optimizer=3,
-                                      normalize_y=False)
-        gp.fit(X, y)
-
-    # Compute score
-    if varIdx is not None:
-        Score = gp.score(X, y)
-        print('-' * 50)
-        print(f'Output variable {varIdx}:')
-        print('The estimation of GPE coefficients converged,')
-        print(f'with the R^2 score: {Score:.3f}')
-        print('-' * 50)
-
-    return gp
-
-
-class MetaModel:
+class MetaModel():
     """
     Meta (surrogate) model
 
@@ -328,7 +82,7 @@ class MetaModel:
         `'no'`. There are two ways to select number of components: use
         percentage of the explainable variance threshold (between 0 and 100)
         (Option A) or direct prescription of components' number (Option B):
-            >>> MetaModelOpts = MetaModel()
+
             >>> MetaModelOpts.dim_red_method = 'PCA'
             >>> MetaModelOpts.var_pca_threshold = 99.999  # Option A
             >>> MetaModelOpts.n_pca_components = 12 # Option B
@@ -356,7 +110,7 @@ class MetaModel:
     def __init__(self, input_obj, meta_model_type='PCE',
                  pce_reg_method='OLS', bootstrap_method='fast',
                  n_bootstrap_itrs=1, pce_deg=1, pce_q_norm=1.0,
-                 dim_red_method='no', apply_constraints=False,
+                 dim_red_method='no', apply_constraints = False, 
                  verbose=False):
 
         self.input_obj = input_obj
@@ -369,38 +123,8 @@ class MetaModel:
         self.dim_red_method = dim_red_method
         self.apply_constraints = apply_constraints
         self.verbose = verbose
-
-        # Other params
-        self.InputSpace = None
-        self.var_pca_threshold = None
-        self.polycoeffs = None
-        self.errorScale = None
-        self.errorclf_poly = None
-        self.errorRegMethod = None
-        self.nlc = None
-        self.univ_p_val = None
-        self.n_pca_components = None
-        self.out_names = None
-        self.allBasisIndices = None
-        self.deg_array = None
-        self.n_samples = None
-        self.CollocationPoints = None
-        self.pca = None
-        self.LCerror = None
-        self.clf_poly = None
-        self.score_dict = None
-        self.basis_dict = None
-        self.coeffs_dict = None
-        self.q_norm_dict = None
-        self.deg_dict = None
-        self.x_scaler = None
-        self.gp_poly = None
-        self.n_params = None
-        self.ndim = None
-        self.init_type = None
-        self.rmse = None
-
-    def build_metamodel(self, n_init_samples=None) -> None:
+ 
+    def build_metamodel(self, n_init_samples = None) -> None:
         """
         Builds the parts of the metamodel (polynomials, ...) that are needed before fitting.
 
@@ -410,33 +134,31 @@ class MetaModel:
             DESCRIPTION.
 
         """
-
+        
         # Generate general warnings
         if self.apply_constraints or self.pce_reg_method.lower() == 'ols':
             print('There are no estimations of surrogate uncertainty available'
                   ' for the chosen regression options. This might lead to issues'
                   ' in later steps.')
-
-        if self.CollocationPoints is None:
-            raise AttributeError('Please provide samples to the metamodel before building it.')
-        self.CollocationPoints = np.array(self.CollocationPoints)
-
+        
         # Add InputSpace to MetaModel if it does not have any
-        if self.InputSpace is None:
-            if n_init_samples is None:
-                n_init_samples = self.CollocationPoints.shape[0]
+        if not hasattr(self, 'InputSpace'):
             self.InputSpace = InputSpace(self.input_obj)
             self.InputSpace.n_init_samples = n_init_samples
             self.InputSpace.init_param_space(np.max(self.pce_deg))
-
+            
         self.ndim = self.InputSpace.ndim
-
+        
+        if not hasattr(self, 'CollocationPoints'):
+            raise AttributeError('Please provide samples to the metamodel before building it.')
+            
         # Transform input samples
         # TODO: this is probably not yet correct! Make 'method' variable
-        self.CollocationPoints = self.InputSpace.transform(self.CollocationPoints, method='user')
+        self.CollocationPoints = self.InputSpace.transform(self.CollocationPoints, method='user') 
 
+        
         self.n_params = len(self.input_obj.Marginals)
-
+        
         # Generate polynomials
         if self.meta_model_type.lower() != 'gpe':
             self.generate_polynomials(np.max(self.pce_deg))
@@ -461,10 +183,8 @@ class MetaModel:
         self.CollocationPoints = np.array(self.CollocationPoints)
         self.n_samples, ndim = self.CollocationPoints.shape
         if self.ndim != ndim:
-            raise AttributeError(
-                'The given samples do not match the given number of priors. The samples should be a 2D array of size'
-                '(#samples, #priors)')
-
+            raise AttributeError('The given samples do not match the given number of priors. The samples should be a 2D array of size (#samples, #priors)')
+            
         self.deg_array = self.__select_degree(ndim, self.n_samples)
 
         # Generate all basis indices
@@ -474,13 +194,15 @@ class MetaModel:
             if deg not in np.fromiter(keys, dtype=float):
                 # Generate the polynomial basis indices
                 for qidx, q in enumerate(self.pce_q_norm):
-                    basis_indices = glexindex(start=0, stop=deg + 1,
+                    basis_indices = glexindex(start=0, stop=deg+1,
                                               dimensions=self.n_params,
                                               cross_truncation=q,
                                               reverse=False, graded=True)
                     self.allBasisIndices[str(deg)][str(q)] = basis_indices
 
-    def fit(self, X: np.array, y: dict, parallel=False, verbose=False):
+        
+        
+    def fit(self, X, y, parallel = True, verbose = False):
         """
         Fits the surrogate to the given data (samples X, outputs y).
         Note here that the samples X should be the transformed samples provided
@@ -492,43 +214,33 @@ class MetaModel:
             The parameter value combinations that the model was evaluated at.
         y : dict of 2D lists or arrays of shape (#samples, #timesteps)
             The respective model evaluations.
-        parallel : bool
-            Set to True to run the training in parallel for various keys.
-            The default is False.
-        verbose : bool
-            Set to True to obtain more information during runtime.
-            The default is False.
 
         Returns
         -------
         None.
 
         """
-        #        print(X)
-        #        print(X.shape)
-        #        print(y)
-        #        print(y['Z'].shape)
         X = np.array(X)
         for key in y.keys():
             y_val = np.array(y[key])
-            if y_val.ndim != 2:
+            if y_val.ndim !=2:
                 raise ValueError('The given outputs y should be 2D')
             y[key] = np.array(y[key])
-
+        
         # Output names are the same as the keys in y
         self.out_names = list(y.keys())
-
+        
         # Build the MetaModel on the static samples
         self.CollocationPoints = X
-
+        
         # TODO: other option: rebuild every time
-        if self.deg_array is None:
-            self.build_metamodel(n_init_samples=X.shape[1])
-
+        if not hasattr(self, 'deg_array'):
+            self.build_metamodel(n_init_samples = X.shape[1])
+            
         # Evaluate the univariate polynomials on InputSpace
         if self.meta_model_type.lower() != 'gpe':
-            self.univ_p_val = self.univ_basis_vals(self.CollocationPoints)
-
+           self.univ_p_val = self.univ_basis_vals(self.CollocationPoints)
+        
         # --- Loop through data points and fit the surrogate ---
         if verbose:
             print(f"\n>>>> Training the {self.meta_model_type} metamodel "
@@ -540,10 +252,10 @@ class MetaModel:
             self.n_bootstrap_itrs = 100
 
         # Check if fast version (update coeffs with OLS) is selected.
-        n_comp_dict = {}
-        first_out = {}
         if self.bootstrap_method.lower() == 'fast':
             fast_bootstrap = True
+            first_out = {}
+            n_comp_dict = {}
         else:
             fast_bootstrap = False
 
@@ -583,8 +295,8 @@ class MetaModel:
                     # Start transformation
                     pca, target, n_comp = self.pca_transformation(
                         Output[b_indices], verbose=False
-                    )
-                    self.pca[f'b_{b_i + 1}'][key] = pca
+                        )
+                    self.pca[f'b_{b_i+1}'][key] = pca
                     # Store the number of components for fast bootstrapping
                     if fast_bootstrap and b_i == 0:
                         n_comp_dict[key] = n_comp
@@ -592,40 +304,39 @@ class MetaModel:
                     target = Output[b_indices]
 
                 # Parallel fit regression
-                out = None
                 if self.meta_model_type.lower() == 'gpe':
                     # Prepare the input matrix
                     scaler = MinMaxScaler()
                     X_S = scaler.fit_transform(X_train_b)
 
-                    self.x_scaler[f'b_{b_i + 1}'][key] = scaler
+                    self.x_scaler[f'b_{b_i+1}'][key] = scaler
                     if parallel:
                         out = Parallel(n_jobs=-1, backend='multiprocessing')(
-                            delayed(gaussian_process_emulator)(
+                            delayed(self.gaussian_process_emulator)(
                                 X_S, target[:, idx]) for idx in
                             range(target.shape[1]))
                     else:
-                        results = map(gaussian_process_emulator,
-                                      [X_train_b] * target.shape[1],
+                        results = map(self.gaussian_process_emulator,
+                                      [X_train_b]*target.shape[1],
                                       [target[:, idx] for idx in
                                        range(target.shape[1])]
                                       )
                         out = list(results)
 
                     for idx in range(target.shape[1]):
-                        self.gp_poly[f'b_{b_i + 1}'][key][f"y_{idx + 1}"] = out[idx]
+                        self.gp_poly[f'b_{b_i+1}'][key][f"y_{idx+1}"] = out[idx]
 
                 else:
                     self.univ_p_val = self.univ_p_val[b_indices]
                     if parallel and (not fast_bootstrap or b_i == 0):
                         out = Parallel(n_jobs=-1, backend='multiprocessing')(
-                            delayed(self.adaptive_regression)(  # X_train_b,
-                                target[:, idx],
-                                idx)
+                            delayed(self.adaptive_regression)(X_train_b,
+                                                              target[:, idx],
+                                                              idx)
                             for idx in range(target.shape[1]))
                     elif not parallel and (not fast_bootstrap or b_i == 0):
                         results = map(self.adaptive_regression,
-                                      # [X_train_b] * target.shape[1],
+                                      [X_train_b]*target.shape[1],
                                       [target[:, idx] for idx in
                                        range(target.shape[1])],
                                       range(target.shape[1]))
@@ -636,26 +347,27 @@ class MetaModel:
                         first_out[key] = copy.deepcopy(out)
 
                     if b_i > 0 and fast_bootstrap:
+
                         # fast bootstrap
                         out = self.update_pce_coeffs(
                             X_train_b, target, first_out[key])
 
                     for i in range(target.shape[1]):
                         # Create a dict to pass the variables
-                        self.deg_dict[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['degree']
-                        self.q_norm_dict[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['qnorm']
-                        self.coeffs_dict[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['coeffs']
-                        self.basis_dict[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['multi_indices']
-                        self.score_dict[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['LOOCVScore']
-                        self.clf_poly[f'b_{b_i + 1}'][key][f"y_{i + 1}"] = out[i]['clf_poly']
-                        # self.LCerror[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['LCerror']
+                        self.deg_dict[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['degree']
+                        self.q_norm_dict[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['qnorm']
+                        self.coeffs_dict[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['coeffs']
+                        self.basis_dict[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['multi_indices']
+                        self.score_dict[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['LOOCVScore']
+                        self.clf_poly[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['clf_poly']
+                        #self.LCerror[f'b_{b_i+1}'][key][f"y_{i+1}"] = out[i]['LCerror']
 
         if verbose:
             print(f"\n>>>> Training the {self.meta_model_type} metamodel"
                   " sucessfully completed. <<<<<<\n")
 
     # -------------------------------------------------------------------------
-    def update_pce_coeffs(self, X, y, out_dict=None):
+    def update_pce_coeffs(self, X, y, out_dict = None):
         """
         Updates the PCE coefficients using only the ordinary least squares method
         for the fast version of the bootstrapping.
@@ -676,26 +388,26 @@ class MetaModel:
             The updated training output dictionary.
 
         """
-        # TODO: why is X not used here?
         # Make a copy
         final_out_dict = copy.deepcopy(out_dict)
 
         # Loop over the points
         for i in range(y.shape[1]):
 
+                    
             # Extract nonzero basis indices
             nnz_idx = np.nonzero(out_dict[i]['coeffs'])[0]
             if len(nnz_idx) != 0:
                 basis_indices = out_dict[i]['multi_indices']
 
                 # Evaluate the multivariate polynomials on CollocationPoints
-                psi = create_psi(basis_indices, self.univ_p_val)
+                psi = self.create_psi(basis_indices, self.univ_p_val)
 
                 # Calculate the coefficients of the surrogate model
                 updated_out = self.regression(
                     psi, y[:, i], basis_indices, reg_method='OLS',
                     sparsity=False
-                )
+                    )
 
                 # Update coeffs in out_dict
                 final_out_dict[i]['coeffs'][nnz_idx] = updated_out['coeffs']
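The fast-bootstrap update above refits only the already-active basis terms with ordinary least squares. A standalone sketch of that idea with toy data (the actual method routes through self.regression with reg_method='OLS'):

    import numpy as np
    import sklearn.linear_model as lm

    coeffs = np.array([0.0, 1.2, 0.0, -0.4])          # coefficients from the initial fit
    nnz_idx = np.nonzero(coeffs)[0]                   # indices of the active basis terms
    psi = np.random.rand(20, 4)                       # regressors on the bootstrap sample
    y = psi[:, nnz_idx] @ coeffs[nnz_idx] + 0.01 * np.random.randn(20)

    ols = lm.LinearRegression(fit_intercept=False).fit(psi[:, nnz_idx], y)
    coeffs[nnz_idx] = ols.coef_                       # update only the active coefficients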
@@ -713,7 +425,7 @@ class MetaModel:
 
         """
         self.InputSpace = InputSpace(self.input_obj,
-                                     meta_Model_type=self.meta_model_type)
+                                    meta_Model_type=self.meta_model_type)
 
     # -------------------------------------------------------------------------
     def univ_basis_vals(self, samples, n_max=None):
@@ -750,6 +462,63 @@ class MetaModel:
         return univ_basis
 
     # -------------------------------------------------------------------------
+    def create_psi(self, basis_indices, univ_p_val):
+        """
+        This function assembles the design matrix Psi from the given basis index
+        set INDICES and the univariate polynomial evaluations univ_p_val.
+
+        Parameters
+        ----------
+        basis_indices : array of shape (n_terms, n_params)
+            Multi-indices of multivariate polynomials.
+        univ_p_val : array of (n_samples, n_params, n_max+1)
+            All univariate regressors up to `n_max`.
+
+        Raises
+        ------
+        ValueError
+            n_terms in arguments do not match.
+
+        Returns
+        -------
+        psi : array of shape (n_samples, n_terms)
+            Multivariate regressors.
+
+        """
+        # Check if BasisIndices is a sparse matrix
+        sparsity = sp.sparse.issparse(basis_indices)
+        if sparsity:
+            basis_indices = basis_indices.toarray()
+
+        # Initialization and consistency checks
+        # number of input variables
+        n_params = univ_p_val.shape[1]
+
+        # Size of the experimental design
+        n_samples = univ_p_val.shape[0]
+
+        # number of basis terms
+        n_terms = basis_indices.shape[0]
+
+        # check that the variables have consistent sizes
+        if n_params != basis_indices.shape[1]:
+            raise ValueError(
+                f"The shapes of basis_indices ({basis_indices.shape[1]}) and "
+                f"univ_p_val ({n_params}) don't match!!"
+                )
+
+        # Preallocate the Psi matrix for performance
+        psi = np.ones((n_samples, n_terms))
+        # Assemble the Psi matrix
+        for m in range(basis_indices.shape[1]):
+            aa = np.where(basis_indices[:, m] > 0)[0]
+            try:
+                basisIdx = basis_indices[aa, m]
+                bb = univ_p_val[:, m, basisIdx].reshape(psi[:, aa].shape)
+                psi[:, aa] = np.multiply(psi[:, aa], bb)
+            except ValueError as err:
+                raise err
+        return psi
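The method above multiplies, for each basis term, the univariate evaluations selected by its multi-index. A small sketch of that product rule with random values, assuming the degree-0 basis entries equal one as in an (a)PCE basis:

    import numpy as np

    univ_p_val = np.random.rand(3, 2, 3)       # 3 samples, 2 parameters, degrees 0..2
    univ_p_val[:, :, 0] = 1.0                  # degree-0 polynomials are constant one
    basis_indices = np.array([[0, 0], [1, 0], [0, 1], [1, 2]])

    psi = np.ones((3, basis_indices.shape[0]))
    for k, multi_idx in enumerate(basis_indices):
        for j, deg in enumerate(multi_idx):
            psi[:, k] *= univ_p_val[:, j, deg]     # psi[i, k] = prod_j P_deg(x_ij)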
 
     # -------------------------------------------------------------------------
     def regression(self, X, y, basis_indices, reg_method=None, sparsity=True):
@@ -767,8 +536,6 @@ class MetaModel:
             Multi-indices of multivariate polynomials.
         reg_method : str, optional
             DESCRIPTION. The default is None.
-        sparsity : bool
-            Use with sparsity-inducing training methods. The default is True
 
         Returns
         -------
@@ -790,23 +557,22 @@ class MetaModel:
             Lambda = 1e-6
 
         # Bayes sparse adaptive aPCE
-        clf_poly = None
         if reg_method.lower() == 'ols':
             clf_poly = lm.LinearRegression(fit_intercept=False)
         elif reg_method.lower() == 'brr':
             clf_poly = lm.BayesianRidge(n_iter=1000, tol=1e-7,
                                         fit_intercept=False,
-                                        # normalize=True,
+                                        #normalize=True,
                                         compute_score=compute_score,
                                         alpha_1=1e-04, alpha_2=1e-04,
                                         lambda_1=Lambda, lambda_2=Lambda)
             clf_poly.converged = True
 
         elif reg_method.lower() == 'ard':
-            if X.shape[0] < 2:
+            if X.shape[0]<2:
                 raise ValueError('Regression with ARD can only be performed for more than 2 samples')
             clf_poly = lm.ARDRegression(fit_intercept=False,
-                                        # normalize=True,
+                                        #normalize=True,
                                         compute_score=compute_score,
                                         n_iter=1000, tol=0.0001,
                                         alpha_1=1e-3, alpha_2=1e-3,
@@ -819,14 +585,14 @@ class MetaModel:
                                          n_iter=300, tol=1e-10)
 
         elif reg_method.lower() == 'bcs':
-            if X.shape[0] < 10:
+            if X.shape[0]<10:
                 raise ValueError('Regression with BCS can only be performed for more than 10 samples')
             clf_poly = RegressionFastLaplace(fit_intercept=False,
-                                             bias_term=bias_term,
-                                             n_iter=1000, tol=1e-7)
+                                         bias_term=bias_term,
+                                         n_iter=1000, tol=1e-7)
 
         elif reg_method.lower() == 'lars':
-            if X.shape[0] < 10:
+            if X.shape[0]<10:
                 raise ValueError('Regression with LARS can only be performed with at least 10 samples')
             clf_poly = lm.LassoLarsCV(fit_intercept=False)
 
@@ -842,29 +608,29 @@ class MetaModel:
 
         elif reg_method.lower() == 'ebl':
             clf_poly = EBLinearRegression(optimizer='em')
-
+            
+        
         # Training with constraints automatically uses L2
-        if self.apply_constraints:
+        if self.apply_constraints:       
             # TODO: set the constraints here
             # Define the nonlin. constraint     
-            nlc = NonlinearConstraint(lambda x: np.matmul(X, x), -1, 1.1)
+            nlc = NonlinearConstraint(lambda x: np.matmul(X,x),-1,1.1)
             self.nlc = nlc
-
-            fun = lambda x: (np.linalg.norm(np.matmul(X, x) - y, ord=2)) ** 2
-            res = None
-            if self.init_type == 'zeros':
-                res = minimize(fun, np.zeros(X.shape[1]), method='trust-constr', constraints=self.nlc)
+            
+            fun = lambda x: (np.linalg.norm(np.matmul(X, x)-y, ord = 2))**2
+            if self.init_type =='zeros':
+                res = minimize(fun, np.zeros(X.shape[1]), method = 'trust-constr', constraints  = self.nlc) 
             if self.init_type == 'nonpi':
                 clf_poly.fit(X, y)
                 coeff = clf_poly.coef_
-                res = minimize(fun, coeff, method='trust-constr', constraints=self.nlc)
-
+                res = minimize(fun, coeff, method = 'trust-constr', constraints  = self.nlc)
+            
             coeff = np.array(res.x)
             clf_poly.coef_ = coeff
             clf_poly.X = X
             clf_poly.y = y
             clf_poly.intercept_ = 0
-
+            
         # Training without constraints uses chosen regression method
         else:
             clf_poly.fit(X, y)
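For the constrained branch above, a standalone sketch of the trust-constr least-squares step, with the same constraint bounds (-1, 1.1) as in the code and illustrative data:

    import numpy as np
    from scipy.optimize import minimize, NonlinearConstraint

    X = np.random.rand(30, 5)
    y = X @ np.array([0.2, -0.1, 0.3, 0.0, 0.1])

    nlc = NonlinearConstraint(lambda c: np.matmul(X, c), -1, 1.1)     # bound the fitted response
    fun = lambda c: np.linalg.norm(np.matmul(X, c) - y, ord=2) ** 2   # squared L2 misfit
    res = minimize(fun, np.zeros(X.shape[1]), method='trust-constr', constraints=nlc)
    coeffs = res.x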
@@ -892,15 +658,76 @@ class MetaModel:
         return_out_dict['sparePsi'] = sparse_X
         return_out_dict['coeffs'] = coeffs
         return return_out_dict
+    
+    # -------------------------------------------------------------------------
+    def create_psi(self, basis_indices, univ_p_val):
+        """
+        This function assembles the design matrix Psi from the given basis index
+        set INDICES and the univariate polynomial evaluations univ_p_val.
+
+        Parameters
+        ----------
+        basis_indices : array of shape (n_terms, n_params)
+            Multi-indices of multivariate polynomials.
+        univ_p_val : array of (n_samples, n_params, n_max+1)
+            All univariate regressors up to `n_max`.
+
+        Raises
+        ------
+        ValueError
+            n_terms in arguments do not match.
+
+        Returns
+        -------
+        psi : array of shape (n_samples, n_terms)
+            Multivariate regressors.
+
+        """
+        # Check if BasisIndices is a sparse matrix
+        sparsity = sp.sparse.issparse(basis_indices)
+        if sparsity:
+            basis_indices = basis_indices.toarray()
+
+        # Initialization and consistency checks
+        # number of input variables
+        n_params = univ_p_val.shape[1]
+
+        # Size of the experimental design
+        n_samples = univ_p_val.shape[0]
+
+        # number of basis terms
+        n_terms = basis_indices.shape[0]
+
+        # check that the variables have consistent sizes
+        if n_params != basis_indices.shape[1]:
+            raise ValueError(
+                f"The shapes of basis_indices ({basis_indices.shape[1]}) and "
+                f"univ_p_val ({n_params}) don't match!!"
+                )
+
+        # Preallocate the Psi matrix for performance
+        psi = np.ones((n_samples, n_terms))
+        # Assemble the Psi matrix
+        for m in range(basis_indices.shape[1]):
+            aa = np.where(basis_indices[:, m] > 0)[0]
+            try:
+                basisIdx = basis_indices[aa, m]
+                bb = univ_p_val[:, m, basisIdx].reshape(psi[:, aa].shape)
+                psi[:, aa] = np.multiply(psi[:, aa], bb)
+            except ValueError as err:
+                raise err
+        return psi
 
     # --------------------------------------------------------------------------------------------------------
-    def adaptive_regression(self, ED_Y, varIdx, verbose=False):
+    def adaptive_regression(self, ED_X, ED_Y, varIdx, verbose=False):
         """
         Adaptively fits the PCE model by comparing the scores of different
         degrees and q-norms.
 
         Parameters
         ----------
+        ED_X : array of shape (n_samples, n_params)
+            Experimental design.
         ED_Y : array of shape (n_samples,)
             Target values, i.e. simulation results for the Experimental design.
         varIdx : int
@@ -916,7 +743,7 @@ class MetaModel:
 
         """
 
-        # n_samples, n_params = ED_X.shape
+        n_samples, n_params = ED_X.shape
         # Initialization
         qAllCoeffs, AllCoeffs = {}, {}
         qAllIndices_Sparse, AllIndices_Sparse = {}, {}
@@ -924,7 +751,7 @@ class MetaModel:
         qAllnTerms, AllnTerms = {}, {}
         qAllLCerror, AllLCerror = {}, {}
 
-        # Extract degree array and q-norm array
+        # Extract degree array and qnorm array
         deg_array = np.array([*self.allBasisIndices], dtype=int)
         qnorm = [*self.allBasisIndices[str(int(deg_array[0]))]]
 
@@ -936,7 +763,7 @@ class MetaModel:
         n_checks_qNorm = 2
         nqnorms = len(qnorm)
         qNormEarlyStop = True
-        if nqnorms < n_checks_qNorm + 1:
+        if nqnorms < n_checks_qNorm+1:
             qNormEarlyStop = False
 
         # =====================================================================
@@ -944,7 +771,6 @@ class MetaModel:
         # polynomial degree until the highest accuracy is reached
         # =====================================================================
         # For each degree check all q-norms and choose the best one
-        best_q = None
         scores = -np.inf * np.ones(deg_array.shape[0])
         qNormScores = -np.inf * np.ones(nqnorms)
 
@@ -957,38 +783,38 @@ class MetaModel:
                 BasisIndices = self.allBasisIndices[str(deg)][str(q)]
 
                 # Assemble the Psi matrix
-                Psi = create_psi(BasisIndices, self.univ_p_val)
+                Psi = self.create_psi(BasisIndices, self.univ_p_val)
 
-                # Calulate the cofficients of the metamodel
+                # Calculate the coefficients of the meta model
                 outs = self.regression(Psi, ED_Y, BasisIndices)
 
                 # Calculate and save the score of LOOCV
-                score, LCerror = corr_loocv_error(outs['clf_poly'],
-                                                  outs['sparePsi'],
-                                                  outs['coeffs'],
-                                                  ED_Y)
+                score, LCerror = self.corr_loocv_error(outs['clf_poly'],
+                                                       outs['sparePsi'],
+                                                       outs['coeffs'],
+                                                       ED_Y)
 
                 # Check the convergence of noise for FastARD
                 if self.pce_reg_method == 'FastARD' and \
-                        outs['clf_poly'].alpha_ < np.finfo(np.float32).eps:
+                   outs['clf_poly'].alpha_ < np.finfo(np.float32).eps:
                     score = -np.inf
 
                 qNormScores[qidx] = score
-                qAllCoeffs[str(qidx + 1)] = outs['coeffs']
-                qAllIndices_Sparse[str(qidx + 1)] = outs['spareMulti-Index']
-                qAllclf_poly[str(qidx + 1)] = outs['clf_poly']
-                qAllnTerms[str(qidx + 1)] = BasisIndices.shape[0]
-                qAllLCerror[str(qidx + 1)] = LCerror
+                qAllCoeffs[str(qidx+1)] = outs['coeffs']
+                qAllIndices_Sparse[str(qidx+1)] = outs['spareMulti-Index']
+                qAllclf_poly[str(qidx+1)] = outs['clf_poly']
+                qAllnTerms[str(qidx+1)] = BasisIndices.shape[0]
+                qAllLCerror[str(qidx+1)] = LCerror
 
                 # EarlyStop check
                 # if there are at least n_checks_qNorm entries after the
                 # best one, we stop
                 if qNormEarlyStop and \
-                        sum(np.isfinite(qNormScores)) > n_checks_qNorm:
+                   sum(np.isfinite(qNormScores)) > n_checks_qNorm:
                     # If the error has increased the last two iterations, stop!
                     qNormScores_nonInf = qNormScores[np.isfinite(qNormScores)]
                     deltas = np.sign(np.diff(qNormScores_nonInf))
-                    if sum(deltas[-n_checks_qNorm + 1:]) == 2:
+                    if sum(deltas[-n_checks_qNorm+1:]) == 2:
                         # stop the q-norm loop here
                         break
                 if np.var(ED_Y) == 0:
@@ -998,11 +824,11 @@ class MetaModel:
             best_q = np.nanargmax(qNormScores)
             scores[degIdx] = qNormScores[best_q]
 
-            AllCoeffs[str(degIdx + 1)] = qAllCoeffs[str(best_q + 1)]
-            AllIndices_Sparse[str(degIdx + 1)] = qAllIndices_Sparse[str(best_q + 1)]
-            Allclf_poly[str(degIdx + 1)] = qAllclf_poly[str(best_q + 1)]
-            AllnTerms[str(degIdx + 1)] = qAllnTerms[str(best_q + 1)]
-            AllLCerror[str(degIdx + 1)] = qAllLCerror[str(best_q + 1)]
+            AllCoeffs[str(degIdx+1)] = qAllCoeffs[str(best_q+1)]
+            AllIndices_Sparse[str(degIdx+1)] = qAllIndices_Sparse[str(best_q+1)]
+            Allclf_poly[str(degIdx+1)] = qAllclf_poly[str(best_q+1)]
+            AllnTerms[str(degIdx+1)] = qAllnTerms[str(best_q+1)]
+            AllLCerror[str(degIdx+1)] = qAllLCerror[str(best_q+1)]
 
             # Check the direction of the error (on average):
             # if it increases consistently stop the iterations
@@ -1010,7 +836,7 @@ class MetaModel:
                 scores_nonInf = scores[scores != -np.inf]
                 ss = np.sign(scores_nonInf - np.max(scores_nonInf))
                 # ss<0 error decreasing
-                errorIncreases = np.sum(np.sum(ss[-2:])) <= -1 * n_checks_degree
+                errorIncreases = np.sum(np.sum(ss[-2:])) <= -1*n_checks_degree
 
             if errorIncreases:
                 break
@@ -1021,7 +847,7 @@ class MetaModel:
 
         # ------------------ Summary of results ------------------
         # Select the one with the best score and save the necessary outputs
-        best_deg = np.nanargmax(scores) + 1
+        best_deg = np.nanargmax(scores)+1
         coeffs = AllCoeffs[str(best_deg)]
         basis_indices = AllIndices_Sparse[str(best_deg)]
         clf_poly = Allclf_poly[str(best_deg)]
@@ -1037,24 +863,24 @@ class MetaModel:
             nnz_idx = np.nonzero(coeffs)[0]
             BasisIndices_Sparse = basis_indices[nnz_idx]
 
-            print(f'Output variable {varIdx + 1}:')
+            print(f'Output variable {varIdx+1}:')
             print('The estimation of PCE coefficients converged at polynomial '
-                  f'degree {deg_array[best_deg - 1]} with '
+                  f'degree {deg_array[best_deg-1]} with '
                   f'{len(BasisIndices_Sparse)} terms (Sparsity index = '
-                  f'{round(len(BasisIndices_Sparse) / P, 3)}).')
+                  f'{round(len(BasisIndices_Sparse)/P, 3)}).')
 
-            print(f'Final ModLOO error estimate: {1 - max(scores):.3e}')
-            print('\n' + '-' * 50)
+            print(f'Final ModLOO error estimate: {1-max(scores):.3e}')
+            print('\n'+'-'*50)
 
         if verbose:
-            print('=' * 50)
-            print(' ' * 10 + ' Summary of results ')
-            print('=' * 50)
+            print('='*50)
+            print(' '*10 + ' Summary of results ')
+            print('='*50)
 
             print("Scores:\n", scores)
-            print("Degree of best score:", self.deg_array[best_deg - 1])
+            print("Degree of best score:", self.deg_array[best_deg-1])
             print("No. of terms:", len(basis_indices))
-            print("Sparsity index:", round(len(basis_indices) / P, 3))
+            print("Sparsity index:", round(len(basis_indices)/P, 3))
             print("Best Indices:\n", basis_indices)
 
             if self.pce_reg_method in ['BRR', 'ARD']:
@@ -1073,7 +899,7 @@ class MetaModel:
 
                 plt.text(0.75, 0.5, text, fontsize=18, transform=ax.transAxes)
                 plt.show()
-            print('=' * 80)
+            print('='*80)
 
         # Create a dict to pass the outputs
         returnVars = dict()
@@ -1087,6 +913,118 @@ class MetaModel:
 
         return returnVars
 
+    # -------------------------------------------------------------------------
+    def corr_loocv_error(self, clf, psi, coeffs, y):
+        """
+        Calculates the corrected LOO error for regression on regressor
+        matrix `psi` that generated the coefficients based on [1] and [2].
+
+        [1] Blatman, G., 2009. Adaptive sparse polynomial chaos expansions for
+            uncertainty propagation and sensitivity analysis (Doctoral
+            dissertation, Clermont-Ferrand 2).
+
+        [2] Blatman, G. and Sudret, B., 2011. Adaptive sparse polynomial chaos
+            expansion based on least angle regression. Journal of computational
+            Physics, 230(6), pp.2345-2367.
+
+        Parameters
+        ----------
+        clf : object
+            Fitted estimator.
+        psi : array of shape (n_samples, n_features)
+            The multivariate orthogonal polynomials (regressor).
+        coeffs : array-like of shape (n_features,)
+            Estimated coefficients.
+        y : array of shape (n_samples,)
+            Target values.
+
+        Returns
+        -------
+        R_2 : float
+            LOOCV Validation score (1 - LOOCV error).
+        residual : array of shape (n_samples,)
+            Residual values (y - predicted targets).
+
+        """
+        psi = np.array(psi, dtype=float)
+
+        # Create PSI_Sparse by removing redundant terms
+        nnz_idx = np.nonzero(coeffs)[0]
+        if len(nnz_idx) == 0:
+            nnz_idx = [0]
+        psi_sparse = psi[:, nnz_idx]
+
+        # NrCoeffs of aPCEs
+        P = len(nnz_idx)
+        # NrEvaluation (Size of experimental design)
+        N = psi.shape[0]
+
+        # Build the projection matrix
+        PsiTPsi = np.dot(psi_sparse.T, psi_sparse)
+
+        if np.linalg.cond(PsiTPsi) > 1e-12: #and \
+           # np.linalg.cond(PsiTPsi) < 1/sys.float_info.epsilon:
+            # faster
+            try:
+                M = sp.linalg.solve(PsiTPsi,
+                                sp.sparse.eye(PsiTPsi.shape[0]).toarray())
+            except:
+                raise AttributeError('There are too few samples for the corrected loo-cv error. Fit surrogate on at least as many samples as parameters to use this')
+        else:
+            # stabler
+            M = np.linalg.pinv(PsiTPsi)
+
+        # h factor (the full matrix is not calculated explicitly,
+        # only the trace is, to save memory)
+        PsiM = np.dot(psi_sparse, M)
+
+        h = np.sum(np.multiply(PsiM, psi_sparse), axis=1, dtype=np.longdouble)#float128)
+
+        # ------ Calculate Error Loocv for each measurement point ----
+        # Residuals
+        try:
+            residual = clf.predict(psi) - y
+        except:
+            residual = np.dot(psi, coeffs) - y
+
+        # Variance
+        var_y = np.var(y)
+
+        if var_y == 0:
+            norm_emp_error = 0
+            loo_error = 0
+            LCerror = np.zeros((y.shape))
+            return 1-loo_error, LCerror
+        else:
+            norm_emp_error = np.mean(residual**2)/var_y
+
+            # LCerror = np.divide(residual, (1-h))
+            LCerror = residual / (1-h)
+            loo_error = np.mean(np.square(LCerror)) / var_y
+            # if there are NaNs, just return an infinite LOO error (this
+            # happens, e.g., when a strongly underdetermined problem is solved)
+            if np.isnan(loo_error):
+                loo_error = np.inf
+
+        # Corrected Error for over-determined system
+        tr_M = np.trace(M)
+        if tr_M < 0 or abs(tr_M) > 1e6:
+            tr_M = np.trace(np.linalg.pinv(np.dot(psi.T, psi)))
+
+        # Over-determined system of Equation
+        if N > P:
+            T_factor = N/(N-P) * (1 + tr_M)
+
+        # Under-determined system of Equation
+        else:
+            T_factor = np.inf
+
+        corrected_loo_error = loo_error * T_factor
+
+        R_2 = 1 - corrected_loo_error
+
+        return R_2, LCerror
+
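For orientation, a small numeric sketch of the correction factor applied at the end of the method for an over-determined system (all values are illustrative, not taken from a real fit):

    N, P = 20, 5                           # design size and number of active terms
    loo_error, tr_M = 0.02, 0.3            # uncorrected LOO error and trace of M
    T_factor = N / (N - P) * (1 + tr_M)    # correction factor for N > P
    corrected_loo_error = loo_error * T_factor
    R_2 = 1 - corrected_loo_error          # reported LOOCV score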
     # -------------------------------------------------------------------------
     def pca_transformation(self, target, verbose=False):
         """
@@ -1096,9 +1034,6 @@ class MetaModel:
         ----------
         target : array of shape (n_samples,)
             Target values.
-        verbose : bool
-            Set to True to get more information during functtion call.
-            The default is False.
 
         Returns
         -------
@@ -1111,20 +1046,20 @@ class MetaModel:
 
         """
         # Transform via Principal Component Analysis
-        if self.var_pca_threshold is None:
+        if hasattr(self, 'var_pca_threshold'):
             var_pca_threshold = self.var_pca_threshold
         else:
             var_pca_threshold = 100.0
         n_samples, n_features = target.shape
 
-        if self.n_pca_components is None:
+        if hasattr(self, 'n_pca_components'):
             n_pca_components = self.n_pca_components
         else:
             # Instantiate and fit sklearnPCA object
             covar_matrix = sklearnPCA(n_components=None)
             covar_matrix.fit(target)
             var = np.cumsum(np.round(covar_matrix.explained_variance_ratio_,
-                                     decimals=5) * 100)
+                                     decimals=5)*100)
             # Find the number of components to explain self.varPCAThreshold of
             # variance
             try:
@@ -1149,16 +1084,95 @@ class MetaModel:
 
         return pca, scaled_target, n_pca_components
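A standalone sketch of the variance-threshold component selection performed in pca_transformation when n_pca_components is not prescribed; the threshold and data are illustrative, and the exact index arithmetic inside the truncated try-block is an assumption:

    import numpy as np
    from sklearn.decomposition import PCA as sklearnPCA

    target = np.random.rand(40, 10)
    var_pca_threshold = 95.0

    covar_matrix = sklearnPCA(n_components=None)
    covar_matrix.fit(target)
    var = np.cumsum(np.round(covar_matrix.explained_variance_ratio_, decimals=5) * 100)
    n_pca_components = int(np.searchsorted(var, var_pca_threshold) + 1)  # smallest count reaching the threshold

    pca = sklearnPCA(n_components=n_pca_components)
    scaled_target = pca.fit_transform(target)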
 
+    # -------------------------------------------------------------------------
+    def gaussian_process_emulator(self, X, y, nug_term=None, autoSelect=False,
+                                  varIdx=None):
+        """
+        Fits a Gaussian Process Emulator to the target given the training
+         points.
+
+        Parameters
+        ----------
+        X : array of shape (n_samples, n_params)
+            Training points.
+        y : array of shape (n_samples,)
+            Target values.
+        nug_term : float, optional
+            Nugget term. The default is None, i.e. variance of y.
+        autoSelect : bool, optional
+            Loop over some kernels and select the best. The default is False.
+        varIdx : int, optional
+            The index number. The default is None.
+
+        Returns
+        -------
+        gp : object
+            Fitted estimator.
+
+        """
+
+        nug_term = nug_term if nug_term else np.var(y)
+
+        Kernels = [nug_term * kernels.RBF(length_scale=1.0,
+                                          length_scale_bounds=(1e-25, 1e15)),
+                   nug_term * kernels.RationalQuadratic(length_scale=0.2,
+                                                        alpha=1.0),
+                   nug_term * kernels.Matern(length_scale=1.0,
+                                             length_scale_bounds=(1e-15, 1e5),
+                                             nu=1.5)]
+
+        # Automatic selection of the kernel
+        if autoSelect:
+            gp = {}
+            BME = []
+            for i, kernel in enumerate(Kernels):
+                gp[i] = GaussianProcessRegressor(kernel=kernel,
+                                                 n_restarts_optimizer=3,
+                                                 normalize_y=False)
+
+                # Fit to data using Maximum Likelihood Estimation
+                gp[i].fit(X, y)
+
+                # Store the MLE as BME score
+                BME.append(gp[i].log_marginal_likelihood())
+
+            gp = gp[np.argmax(BME)]
+
+        else:
+            gp = GaussianProcessRegressor(kernel=Kernels[0],
+                                          n_restarts_optimizer=3,
+                                          normalize_y=False)
+            gp.fit(X, y)
+
+        # Compute score
+        if varIdx is not None:
+            Score = gp.score(X, y)
+            print('-'*50)
+            print(f'Output variable {varIdx}:')
+            print('The estimation of GPE coefficients converged,')
+            print(f'with the R^2 score: {Score:.3f}')
+            print('-'*50)
+
+        return gp
+
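A minimal usage sketch of the emulator fit restored above, using the default RBF kernel path with toy data:

    import numpy as np
    from sklearn.gaussian_process import GaussianProcessRegressor
    import sklearn.gaussian_process.kernels as kernels

    X = np.random.rand(20, 2)
    y = np.sin(X[:, 0]) + 0.1 * X[:, 1]

    nug_term = np.var(y)
    kernel = nug_term * kernels.RBF(length_scale=1.0, length_scale_bounds=(1e-25, 1e15))
    gp = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=3, normalize_y=False)
    gp.fit(X, y)
    y_mean, y_std = gp.predict(X, return_std=True)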
     # -------------------------------------------------------------------------
     def eval_metamodel(self, samples):
         """
-        Evaluates metamodel at the requested samples. One can also generate
+        Evaluates meta-model at the requested samples. One can also generate
         nsamples.
 
         Parameters
         ----------
         samples : array of shape (n_samples, n_params), optional
-            Samples to evaluate metamodel at. The default is None.
+            Samples to evaluate meta-model at. The default is None.
+        nsamples : int, optional
+            Number of samples to generate, if no `samples` is provided. The
+            default is None.
+        sampling_method : str, optional
+            Type of sampling, if no `samples` is provided. The default is
+            'random'.
+        return_samples : bool, optional
+            Return samples, if no `samples` is provided. The default is False.
 
         Returns
         -------
@@ -1169,33 +1183,29 @@ class MetaModel:
         """
         # Transform into np array - can also be given as list
         samples = np.array(samples)
-
+        
         # Transform samples to the independent space
         samples = self.InputSpace.transform(
             samples,
             method='user'
-        )
+            )
         # Compute univariate bases for the given samples
-        univ_p_val = None
         if self.meta_model_type.lower() != 'gpe':
             univ_p_val = self.univ_basis_vals(
                 samples,
                 n_max=np.max(self.pce_deg)
-            )
+                )
 
-        mean_pred = None
-        std_pred = None
         mean_pred_b = {}
         std_pred_b = {}
-        b_i = 0
         # Loop over bootstrap iterations
         for b_i in range(self.n_bootstrap_itrs):
 
             # Extract model dictionary
             if self.meta_model_type.lower() == 'gpe':
-                model_dict = self.gp_poly[f'b_{b_i + 1}']
+                model_dict = self.gp_poly[f'b_{b_i+1}']
             else:
-                model_dict = self.coeffs_dict[f'b_{b_i + 1}']
+                model_dict = self.coeffs_dict[f'b_{b_i+1}']
 
             # Loop over outputs
             mean_pred = {}
@@ -1209,30 +1219,30 @@ class MetaModel:
 
                     # Prediction with GPE
                     if self.meta_model_type.lower() == 'gpe':
-                        X_T = self.x_scaler[f'b_{b_i + 1}'][output].transform(samples)
-                        gp = self.gp_poly[f'b_{b_i + 1}'][output][in_key]
+                        X_T = self.x_scaler[f'b_{b_i+1}'][output].transform(samples)
+                        gp = self.gp_poly[f'b_{b_i+1}'][output][in_key]
                         y_mean, y_std = gp.predict(X_T, return_std=True)
 
                     else:
                         # Prediction with PCE
                         # Assemble Psi matrix
-                        basis = self.basis_dict[f'b_{b_i + 1}'][output][in_key]
-                        psi = create_psi(basis, univ_p_val)
+                        basis = self.basis_dict[f'b_{b_i+1}'][output][in_key]
+                        psi = self.create_psi(basis, univ_p_val)
 
                         # Prediction
                         if self.bootstrap_method != 'fast' or b_i == 0:
                             # with error bar, i.e. use clf_poly
-                            clf_poly = self.clf_poly[f'b_{b_i + 1}'][output][in_key]
+                            clf_poly = self.clf_poly[f'b_{b_i+1}'][output][in_key]
                             try:
                                 y_mean, y_std = clf_poly.predict(
                                     psi, return_std=True
-                                )
+                                    )
                             except TypeError:
                                 y_mean = clf_poly.predict(psi)
                                 y_std = np.zeros_like(y_mean)
                         else:
                             # without error bar
-                            coeffs = self.coeffs_dict[f'b_{b_i + 1}'][output][in_key]
+                            coeffs = self.coeffs_dict[f'b_{b_i+1}'][output][in_key]
                             y_mean = np.dot(psi, coeffs)
                             y_std = np.zeros_like(y_mean)
 
@@ -1242,7 +1252,7 @@ class MetaModel:
 
                 # Save predictions for each output
                 if self.dim_red_method.lower() == 'pca':
-                    PCA = self.pca[f'b_{b_i + 1}'][output]
+                    PCA = self.pca[f'b_{b_i+1}'][output]
                     mean_pred[output] = PCA.inverse_transform(mean)
                     std_pred[output] = np.zeros(mean.shape)
                 else:
@@ -1273,13 +1283,12 @@ class MetaModel:
             if self.n_bootstrap_itrs > 1:
                 std_pred[output] = np.std(outs, axis=0)
             else:
-                # TODO: this b_i seems off here
                 std_pred[output] = std_pred_b[b_i][output]
 
         return mean_pred, std_pred
 
     # -------------------------------------------------------------------------
-    def create_model_error(self, X, y, MeasuredData):
+    def create_model_error(self, X, y, Model, name='Calib'):
         """
         Fits a GPE-based model error.
 
@@ -1290,7 +1299,8 @@ class MetaModel:
              extracted data.
         y : array of shape (n_outputs,)
             The model response for the MAP parameter set.
-        MeasuredData :
+        Model : object
+            Model object whose observations are read via `read_observation`.
+        name : str, optional
+            Whether to use the calibration or the validation data. The
+            default is `'Calib'`.
 
         Returns
         -------
@@ -1305,7 +1315,7 @@ class MetaModel:
 
         # Read data
         # TODO: do this call outside the metamodel
-        # MeasuredData = Model.read_observation(case=name)
+        MeasuredData = Model.read_observation(case=name)
 
         # Fitting GPR based bias model
         for out in outputNames:
@@ -1321,7 +1331,7 @@ class MetaModel:
             delta = data  # - y[out][0]
             BiasInputs = np.hstack((X[out], y[out].reshape(-1, 1)))
             X_S = scaler.fit_transform(BiasInputs)
-            gp = gaussian_process_emulator(X_S, delta)
+            gp = self.gaussian_process_emulator(X_S, delta)
 
             self.errorScale[out]["y_1"] = scaler
             self.errorclf_poly[out]["y_1"] = gp
@@ -1403,13 +1413,13 @@ class MetaModel:
         """
         # TODO: what properties should be moved to the new object?
         new_MetaModelOpts = copy.deepcopy(self)
-        new_MetaModelOpts.input_obj = self.input_obj  # InputObj
+        new_MetaModelOpts.input_obj = self.input_obj#InputObj
         new_MetaModelOpts.InputSpace = self.InputSpace
-        # new_MetaModelOpts.InputSpace.meta_Model = 'aPCE'
-        # new_MetaModelOpts.InputSpace.InputObj = self.input_obj
-        # new_MetaModelOpts.InputSpace.ndim = len(self.input_obj.Marginals)
+        #new_MetaModelOpts.InputSpace.meta_Model = 'aPCE'
+        #new_MetaModelOpts.InputSpace.InputObj = self.input_obj
+        #new_MetaModelOpts.InputSpace.ndim = len(self.input_obj.Marginals)
         new_MetaModelOpts.n_params = len(self.input_obj.Marginals)
-        # new_MetaModelOpts.InputSpace.hdf5_file = None
+        #new_MetaModelOpts.InputSpace.hdf5_file = None
 
         return new_MetaModelOpts
 
@@ -1429,15 +1439,15 @@ class MetaModel:
         Returns
         -------
         deg_array: array
-            The selected degrees.
+            Array of polynomial degrees to be used.
 
         """
         # Define the deg_array
         max_deg = np.max(self.pce_deg)
         min_Deg = np.min(self.pce_deg)
-
+        
         # TODO: remove the options for sequential?
-        nitr = n_samples - self.InputSpace.n_init_samples
+        #nitr = n_samples - self.InputSpace.n_init_samples
 
         # Check q-norm
         if not np.isscalar(self.pce_q_norm):
@@ -1445,61 +1455,49 @@ class MetaModel:
         else:
             self.pce_q_norm = np.array([self.pce_q_norm])
 
-        # def M_uptoMax(maxDeg):
-        #    n_combo = np.zeros(maxDeg)
-        #    for i, d in enumerate(range(1, maxDeg + 1)):
-        #        n_combo[i] = math.factorial(ndim + d)
-        #        n_combo[i] /= math.factorial(ndim) * math.factorial(d)
-        #    return n_combo
+        def M_uptoMax(maxDeg):
+            n_combo = np.zeros(maxDeg)
+            for i, d in enumerate(range(1, maxDeg+1)):
+                n_combo[i] = math.factorial(ndim+d)
+                n_combo[i] /= math.factorial(ndim) * math.factorial(d)
+            return n_combo
 
         deg_new = max_deg
-        # d = nitr if nitr != 0 and self.n_params > 5 else 1
+        #d = nitr if nitr != 0 and self.n_params > 5 else 1
         # d = 1
         # min_index = np.argmin(abs(M_uptoMax(max_deg)-ndim*n_samples*d))
         # deg_new = range(1, max_deg+1)[min_index]
 
         if deg_new > min_Deg and self.pce_reg_method.lower() != 'fastard':
-            deg_array = np.arange(min_Deg, deg_new + 1)
+            deg_array = np.arange(min_Deg, deg_new+1)
         else:
             deg_array = np.array([deg_new])
 
         return deg_array
 
     def generate_polynomials(self, max_deg=None):
-        """
-        Generates (univariate) polynomials.
-
-        Parameters
-        ----------
-        max_deg : int
-            Maximum polynomial degree.
-
-        Returns
-        -------
-        None
-        """
         # Check for InputSpace
-        if self.InputSpace is None:
+        if not hasattr(self, 'InputSpace'):
             raise AttributeError('Generate or add InputSpace before generating polynomials')
-
+            
         ndim = self.InputSpace.ndim
         # Create orthogonal polynomial coefficients if necessary
-        if (self.meta_model_type.lower() != 'gpe') and max_deg is not None:  # and self.input_obj.poly_coeffs_flag:
+        if (self.meta_model_type.lower()!='gpe') and max_deg is not None:# and self.input_obj.poly_coeffs_flag:
             self.polycoeffs = {}
             for parIdx in tqdm(range(ndim), ascii=True,
                                desc="Computing orth. polynomial coeffs"):
                 poly_coeffs = apoly_construction(
                     self.InputSpace.raw_data[parIdx],
                     max_deg
-                )
-                self.polycoeffs[f'p_{parIdx + 1}'] = poly_coeffs
+                    )
+                self.polycoeffs[f'p_{parIdx+1}'] = poly_coeffs
         else:
             raise AttributeError('MetaModel cannot generate polynomials in the given scenario!')
 
     # -------------------------------------------------------------------------
     def _compute_pce_moments(self):
         """
-        Computes the first two moments using the PCE-based metamodel.
+        Computes the first two moments using the PCE-based meta-model.
 
         Returns
         -------
@@ -1509,11 +1507,11 @@ class MetaModel:
             The second moment (standard deviation) of the surrogate.
 
         """
-
-        # Check if it's truly a pce-surrogate
+        
+        # Check if it's truly a PCE-type surrogate
         if self.meta_model_type.lower() == 'gpe':
             raise AttributeError('Moments can only be computed for pce-type surrogates')
-
+        
         outputs = self.out_names
         pce_means_b = {}
         pce_stds_b = {}
@@ -1521,7 +1519,7 @@ class MetaModel:
         # Loop over bootstrap iterations
         for b_i in range(self.n_bootstrap_itrs):
             # Loop over the metamodels
-            coeffs_dicts = self.coeffs_dict[f'b_{b_i + 1}'].items()
+            coeffs_dicts = self.coeffs_dict[f'b_{b_i+1}'].items()
             means = {}
             stds = {}
             for output, coef_dict in coeffs_dicts:
@@ -1531,20 +1529,20 @@ class MetaModel:
 
                 for index, values in coef_dict.items():
                     idx = int(index.split('_')[1]) - 1
-                    coeffs = self.coeffs_dict[f'b_{b_i + 1}'][output][index]
+                    coeffs = self.coeffs_dict[f'b_{b_i+1}'][output][index]
 
                     # Mean = c_0
                     if coeffs[0] != 0:
                         pce_mean[idx] = coeffs[0]
                     else:
-                        clf_poly = self.clf_poly[f'b_{b_i + 1}'][output]
+                        clf_poly = self.clf_poly[f'b_{b_i+1}'][output]
                         pce_mean[idx] = clf_poly[index].intercept_
                     # Var = sum(coeffs[1:]**2)
                     pce_var[idx] = np.sum(np.square(coeffs[1:]))
 
                 # Save predictions for each output
                 if self.dim_red_method.lower() == 'pca':
-                    PCA = self.pca[f'b_{b_i + 1}'][output]
+                    PCA = self.pca[f'b_{b_i+1}'][output]
                     means[output] = PCA.inverse_transform(pce_mean)
                     stds[output] = PCA.inverse_transform(np.sqrt(pce_var))
                 else:
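Note on the `_compute_pce_moments` hunk above: the reverted code uses the standard aPCE identities, where the mean of each output component is the zeroth coefficient and the variance is the sum of the squared higher-order coefficients. A minimal, self-contained sketch of that idea (using a hypothetical `coeffs_dict` layout, not the exact bayesvalidrox data structures):

    import numpy as np

    def pce_moments(coeffs_dict):
        """Illustrative only: first two moments from PCE coefficients.

        coeffs_dict maps an output key, e.g. 'y_1', to its coefficient vector.
        """
        means, stds = {}, {}
        for key, coeffs in coeffs_dict.items():
            coeffs = np.asarray(coeffs, dtype=float)
            means[key] = coeffs[0]                        # Mean = c_0
            stds[key] = np.sqrt(np.sum(coeffs[1:] ** 2))  # Var = sum(c_i^2), i >= 1
        return means, stds

    # Example with made-up coefficients
    print(pce_moments({'y_1': [0.45, 0.1, -0.02]}))
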
diff --git a/tests/test_BayesInference.py b/tests/test_BayesInference.py
deleted file mode 100644
index 2f22f9158..000000000
--- a/tests/test_BayesInference.py
+++ /dev/null
@@ -1,1105 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Test the BayesInference class for bayesvalidrox
-
-Tests are available for the following functions
-    _logpdf                 - x
-    _kernel_rbf             - x
-class BayesInference:
-    setup_inference         - x
-    create_inference        - x
-    perform_bootstrap       Need working model for tests without emulator
-    _perturb_data           - x
-    create_error_model      Error in the MetaModel
-    _eval_model             Need working model to test this
-    normpdf                 - x
-    _corr_factor_BME_old    - removed
-    _corr_factor_BME        - x
-    _rejection_sampling     - x
-    _posterior_predictive   - x
-    plot_post_params        - x 
-    plot_log_BME            - x
-    _plot_max_a_posteriori  Need working model to test this
-    _plot_post_predictive   - x
-"""
-import sys
-import pytest
-import numpy as np
-import pandas as pd
-
-from bayesvalidrox.surrogate_models.inputs import Input
-from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
-from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
-from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
-from bayesvalidrox.surrogate_models.engine import Engine
-from bayesvalidrox.bayes_inference.discrepancy import Discrepancy
-from bayesvalidrox.bayes_inference.mcmc import MCMC
-from bayesvalidrox.bayes_inference.bayes_inference import BayesInference
-from bayesvalidrox.bayes_inference.bayes_inference import _logpdf, _kernel_rbf
-
-sys.path.append("src/")
-sys.path.append("../src/")
-
-
-#%% Test _logpdf
-
-def test_logpdf() -> None:
-    """
-    Calculate loglikelihood
-
-    """
-    _logpdf([0], [0], [1])
-
-
-#%% Test _kernel_rbf
-
-def test_kernel_rbf() -> None:
-    """
-    Create RBF kernel
-    """
-    X = [[0, 0], [1, 1.5]]
-    pars = [1, 0.5, 1]
-    _kernel_rbf(X, pars)
-
-
-def test_kernel_rbf_lesspar() -> None:
-    """
-    Create RBF kernel with too few parameters
-    """
-    X = [[0, 0], [1, 1.5]]
-    pars = [1, 2]
-    with pytest.raises(AttributeError) as excinfo:
-        _kernel_rbf(X, pars)
-    assert str(excinfo.value) == 'Provide 3 parameters for the RBF kernel!'
-
-
-#%% Test MCMC init
-
-def test_BayesInference() -> None:
-    """
-    Construct a BayesInference object
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    engine = Engine(mm, mod, expdes)
-    BayesInference(engine)
-
-
-#%% Test create_inference
-# TODO: disabled this test!
-def test_create_inference() -> None:
-    """
-    Run inference
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts  # Error if this not class 'DiscrepancyOpts' or dict(?)
-    bi.bootstrap = True  # Error if this and bayes_loocv and just_analysis are all False?
-    bi.plot_post_pred = False  # Remaining issue in the violinplot
-    bi.create_inference()
-    # Remaining issue in the violinplot in plot_post_predictive
-
-
-#%% Test rejection_sampling
-def test_rejection_sampling_nologlik() -> None:
-    """
-    Perform rejection sampling without given log likelihood
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    engine = Engine(mm, mod, expdes)
-    bi = BayesInference(engine)
-    bi.prior_samples = expdes.generate_samples(100, 'random')
-    with pytest.raises(AttributeError) as excinfo:
-        bi._rejection_sampling()
-    assert str(excinfo.value) == 'No log-likelihoods available!'
-
-
-def test_rejection_sampling_noprior() -> None:
-    """
-    Perform rejection sampling without prior samples
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    engine = Engine(mm, mod, expdes)
-    bi = BayesInference(engine)
-    with pytest.raises(AttributeError) as excinfo:
-        bi._rejection_sampling()
-    assert str(excinfo.value) == 'No prior samples available!'
-
-
-def test_rejection_sampling() -> None:
-    """
-    Perform rejection sampling
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    engine = Engine(mm, mod, expdes)
-    bi = BayesInference(engine)
-    bi.prior_samples = expdes.generate_samples(100, 'random')
-    bi.log_likes = np.swapaxes(np.atleast_2d(np.log(np.random.random(100) * 3)), 0, 1)
-    bi._rejection_sampling()
-
-
-#%% Test _perturb_data
-
-def test_perturb_data() -> None:
-    """
-    Perturb data
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    data = pd.DataFrame()
-    data['Z'] = [0.45]
-    bi._perturb_data(data, ['Z'])
-
-
-def test_perturb_data_loocv() -> None:
-    """
-    Perturb data with bayes_loocv
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mod = PL()
-    mm = MetaModel(inp)
-    expdes = ExpDesigns(inp)
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    data = pd.DataFrame()
-    data['Z'] = [0.45]
-    bi.bayes_loocv = True
-    bi._perturb_data(data, ['Z'])
-
-
-#%% Test _eval_model
-
-def test_eval_model() -> None:
-    """
-    Run model with descriptive key
-    """
-    # TODO: need functioning example model to test this
-    None
-
-
-#%% Test corr_factor_BME
-
-def test_corr_factor_BME() -> None:
-    """
-    Calculate correction factor
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    obs_data = {'Z': np.array([0.45])}
-    total_sigma2s = {'Z': np.array([0.15])}
-    logBME = [0, 0, 0]
-
-    bi = BayesInference(engine)
-    bi.selected_indices = {'Z': 0}
-    bi._corr_factor_BME(obs_data, total_sigma2s, logBME)
-
-
-def test_corr_factor_BME_selectedindices() -> None:
-    """
-    Calculate correction factor
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    obs_data = {'Z': np.array([0.45])}
-    total_sigma2s = {'Z': np.array([0.15])}
-    logBME = [0, 0, 0]
-
-    bi = BayesInference(engine)
-    bi.selected_indices = {'Z': 0}
-    bi._corr_factor_BME(obs_data, total_sigma2s, logBME)
-
-
-#%% Test normpdf
-
-def test_normpdf_nosigmas() -> None:
-    """
-    Run normpdf without any additional sigmas
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': np.array([[0.4], [0.5], [0.45]])}
-
-    mm = MetaModel(inp)
-    mod = PL()
-    mod.Output.names = ['Z']
-    engine = Engine(mm, mod, expdes)
-
-    obs_data = {'Z': np.array([0.45])}
-    total_sigma2s = {'Z': np.array([0.15])}
-
-    bi = BayesInference(engine)
-    bi.normpdf(expdes.Y, obs_data, total_sigma2s, sigma2=None, std=None)
-
-
-def test_normpdf_sigma2() -> None:
-    """
-    Run normpdf with sigma2
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': np.array([[0.4], [0.5], [0.45]])}
-
-    mm = MetaModel(inp)
-    mod = PL()
-    mod.Output.names = ['Z']
-    engine = Engine(mm, mod, expdes)
-
-    obs_data = {'Z': np.array([0.45])}
-    total_sigma2s = {'Z': np.array([0.15])}
-    sigma2 = [[0]]
-
-    bi = BayesInference(engine)
-    bi.normpdf(expdes.Y, obs_data, total_sigma2s, sigma2=sigma2, std=None)
-
-
-def test_normpdf_allsigmas() -> None:
-    """
-    Run normpdf with all additional sigmas
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': np.array([[0.4], [0.5], [0.45]])}
-
-    mm = MetaModel(inp)
-    mod = PL()
-    mod.Output.names = ['Z']
-    engine = Engine(mm, mod, expdes)
-
-    obs_data = {'Z': np.array([0.45])}
-    total_sigma2s = {'Z': np.array([0.15])}
-    sigma2 = [[0]]
-
-    bi = BayesInference(engine)
-    bi.normpdf(expdes.Y, obs_data, total_sigma2s, sigma2=sigma2, std=total_sigma2s)
-
-
-#%% Test setup_inference
-
-def test_setup_inference_noobservation() -> None:
-    """
-    Test the object setup without given observations
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    with pytest.raises(Exception) as excinfo:
-        bi.setup_inference()
-    assert str(
-        excinfo.value) == ('Please provide the observation data as a dictionary via observations attribute or pass the '
-                           'csv-file path to MeasurementFile attribute')
-
-
-def test_setup_inference() -> None:
-    """
-    Test the object setup with observations
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.setup_inference()
-
-
-def test_setup_inference_priorsamples() -> None:
-    """
-    Test the object setup with prior samples set by hand
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.prior_samples = np.swapaxes(np.array([np.random.normal(0, 1, 100)]), 0, 1)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.setup_inference()
-
-
-def test_setup_inference_valid() -> None:
-    """
-    Test the object setup for valid
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations_valid = {'Z': np.array([0.45])}
-    mod.observations_valid = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.name = 'valid'
-    bi.setup_inference()
-
-
-def test_setup_inference_noname() -> None:
-    """
-    Test the object setup for an invalid inference name
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.name = ''
-    with pytest.raises(Exception) as excinfo:
-        bi.setup_inference()
-    assert str(excinfo.value) == 'The set inference type is not known! Use either `calib` or `valid`'
-
-
-#%% Test perform_bootstrap
-
-def test_perform_bootstrap() -> None:
-    """
-    Do bootstrap
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.plot_post_pred = False
-    total_sigma2s = {'Z': np.array([0.15])}
-    bi.setup_inference()
-    bi.perform_bootstrap(total_sigma2s)
-
-
-def test_perform_bootstrap_bayesloocv() -> None:
-    """
-    Do bootstrap
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.plot_post_pred = False
-    total_sigma2s = {'Z': np.array([0.15])}
-    bi.setup_inference()
-    bi.bayes_loocv = True
-    bi.perform_bootstrap(total_sigma2s)
-
-
-#%% Test create_error_model
-
-def create_error_model_prior() -> None:
-    """ 
-    Test creating MetaModel error-model for 'prior'
-    """
-    # TODO: there are issues with the expected formats from the MetaModel
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.setup_inference()
-    bi.bias_inputs = expdes.X
-    bi.create_error_model(type_='prior', opt_sigma='B', sampler=None)
-
-
-def create_error_model_posterior() -> None:
-    """ 
-    Test creating MetaModel error-model for 'posterior'
-    """
-    # TODO: there are issues with the expected formats from the MetaModel
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.setup_inference()
-    bi.bias_inputs = expdes.X
-    bi.posterior_df = posterior
-    bi.create_error_model(type_='posterior', opt_sigma='B', sampler=None)
-
-
-#%% Test _posterior_predictive
-
-def test_posterior_predictive() -> None:
-    """
-    Test posterior predictions
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    prior_samples = np.swapaxes(np.array([np.random.normal(0, 1, 10)]), 0, 1)
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-    y_hat, y_std = mm.eval_metamodel(prior_samples)
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    total_sigma2s = {'Z': np.array([0.15])}
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.plot_post_pred = False
-    bi.posterior_df = posterior
-    bi.bias_inputs = expdes.X
-    bi._mean_pce_prior_pred = y_hat
-    bi._std_pce_prior_pred = y_std
-    bi.Discrepancy.total_sigma2 = total_sigma2s
-    bi.setup_inference()
-    bi._posterior_predictive()
-
-
-def test_posterior_predictive_rejection() -> None:
-    """
-    Test posterior predictions with rejection inference
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    prior_samples = np.swapaxes(np.array([np.random.normal(0, 1, 10)]), 0, 1)
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-    y_hat, y_std = mm.eval_metamodel(prior_samples)
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    total_sigma2s = {'Z': np.array([0.15])}
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts
-    bi.bootstrap = True
-    bi.plot_post_pred = False
-    bi.posterior_df = posterior
-    bi.bias_inputs = expdes.X
-    bi._mean_pce_prior_pred = y_hat
-    bi._std_pce_prior_pred = y_std
-    bi.Discrepancy.total_sigma2 = total_sigma2s
-    bi.inference_method = 'rejection'
-    bi.setup_inference()
-    bi._posterior_predictive()
-
-
-#%% Test plot_post_params
-
-def test_plot_post_params() -> None:
-    """
-    Plot posterior dist
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-    bi.posterior_df = posterior
-    bi.plot_post_params('B')
-
-
-def test_plot_post_params_noemulator() -> None:
-    """
-    Plot posterior dist with emulator = False
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-    bi.posterior_df = posterior
-    bi.emulator = False
-    bi.plot_post_params('B')
-
-
-#%% Test plot_log_BME
-
-def test_plot_log_BME() -> None:
-    """
-    Show the log_BME from bootstrapping
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi.log_BME = np.array([[0, 0.2], [0, 0.2]])
-    bi.n_tot_measurement = 1
-    bi.plot_log_BME()
-
-
-def test_plot_log_BME_noemulator() -> None:
-    """
-    Show the log_BME from bootstrapping with emulator = False
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi.log_BME = np.array([[0, 0.2], [0, 0.2]])
-    bi.n_tot_measurement = 1
-    bi.emulator = False
-    bi.plot_log_BME()
-
-
-#%% Test _plot_max_a_posteriori
-
-def test_plot_max_a_posteriori_rejection() -> None:
-    """
-    Plot MAP estimate for rejection
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi.inference_method = 'rejection'
-    bi._plot_post_predictive()
-
-
-def test_plot_max_a_posteriori() -> None:
-    """
-    Plot MAP estimate
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi._plot_post_predictive()
-
-
-#%% Test _plot_post_predictive
-
-
-def test_plot_post_predictive_rejection() -> None:
-    """
-    Plot posterior predictions for rejection
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi.inference_method = 'rejection'
-    bi._plot_post_predictive()
-
-
-def test_plot_post_predictive() -> None:
-    """
-    Plot posterior predictions
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mod = PL()
-    engine = Engine(mm, mod, expdes)
-
-    bi = BayesInference(engine)
-    bi._plot_post_predictive()
-
-
-#%% Main runs
-if __name__ == '__main__':
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    # prior_samples = np.swapaxes(np.array([np.random.normal(0,1,10)]),0,1)
-
-    expdes = ExpDesigns(inp)
-    expdes.init_param_space(max_deg=1)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])  # Error in plots if this is not
-
-    mm = MetaModel(inp)
-    mm.n_params = 1
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(1))
-    # y_hat, y_std = mm.eval_metamodel(prior_samples)
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45])}
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}  # Error if x_values not given
-    mod.Output.names = ['Z']
-    mod.n_obs = 1
-
-    engine = Engine(mm, mod, expdes)
-
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    obsData = pd.DataFrame({'Z': np.array([0.45]), 'x_values': np.array([0])}, columns=mod.Output.names)
-    DiscrepancyOpts = Discrepancy('')
-    DiscrepancyOpts.type = 'Gaussian'
-    DiscrepancyOpts.parameters = (obsData * 0.15) ** 2
-    DiscrepancyOpts.opt_sigma = 'B'
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = DiscrepancyOpts  # Error if this not class 'DiscrepancyOpts' or dict(?)
-    bi.bootstrap = True  # Error if this and bayes_loocv and just_analysis are all False?
-    bi.plot_post_pred = False  # Remaining issue in the violinplot
-    bi.error_model = False
-    bi.bayes_loocv = True
-    if 1:
-        bi.create_inference()
-    # opt_sigma = 'B'
-    # total_sigma2s = {'Z':np.array([0.15])}
-    # data = pd.DataFrame()
-    # data['Z'] = [0.45]
-    # data['x_values'] = [0.3]
-    # bi.setup_inference()
-    # bi.perform_bootstrap(total_sigma2s)
-    posterior = pd.DataFrame()
-    posterior[None] = [0, 1, 0.5]
-    bi.posterior_df = posterior
-    # bi.bias_inputs = expdes.X
-    # bi._mean_pce_prior_pred = y_hat
-    # bi._std_pce_prior_pred = y_std
-    # bi.Discrepancy.total_sigma2 = total_sigma2s
-    # bi.create_error_model(type_ = 'posterior', opt_sigma = 'B', sampler = None)
-    # bi._posterior_predictive()
-    # bi.plot_post_params('B')
-    # bi.log_BME = np.array([[0,0.2],[0,0.2]])
-    # bi.n_tot_measurement = 1
-    # bi.plot_log_BME()
-    bi.inference_method = 'rejection'
-    bi._plot_max_a_posteriori()
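The deleted tests above exercise `_rejection_sampling` with `prior_samples` and `log_likes`. As a rough illustration of the underlying accept/reject idea (not the exact bayesvalidrox implementation), likelihoods are normalised by their maximum and prior samples are kept where a uniform draw falls below that ratio:

    import numpy as np

    def rejection_sample(prior_samples, log_likes, seed=0):
        """Illustrative accept/reject step on log-likelihood values."""
        rng = np.random.default_rng(seed)
        log_likes = np.asarray(log_likes).ravel()
        # Normalise by the maximum likelihood to get acceptance probabilities
        accept_prob = np.exp(log_likes - np.max(log_likes))
        keep = rng.uniform(size=accept_prob.size) < accept_prob
        return np.asarray(prior_samples)[keep]

    samples = np.random.normal(0, 1, size=(100, 1))
    log_likes = np.log(np.random.random(100) * 3)
    posterior = rejection_sample(samples, log_likes)
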
diff --git a/tests/test_BayesModelComparison.py b/tests/test_BayesModelComparison.py
deleted file mode 100644
index 91f328ec7..000000000
--- a/tests/test_BayesModelComparison.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Test the BayesModelComparison class in bayesvalidrox.
-Tests are available for the following functions
-Class BayesModelComparison: 
-    create_model_comparison
-    compare_models
-    generate_dataset
-    __perturb_data
-    cal_model_weight
-    plot_just_analysis
-    plot_model_weights
-    plot_bayes_factor
-    
-"""
-import sys
-sys.path.append("src/")
-import pytest
-import numpy as np
-
-from bayesvalidrox.bayes_inference.bayes_model_comparison import BayesModelComparison
-#from bayesvalidrox.surrogate_models.input_space import InputSpace
-
-def test_BMC() -> None:
-    """
-    Build BMC without inputs
-    """
-    BayesModelComparison()
\ No newline at end of file
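The deleted comparison tests list `cal_model_weight` among the functions of interest. As a generic illustration (not necessarily the library's exact formula), Bayesian model weights follow from normalising the model evidences; working with log-BME values and subtracting the maximum keeps the computation numerically stable:

    import numpy as np

    def model_weights_from_log_bme(log_bme):
        """Illustrative only: normalise log-evidences into model weights."""
        log_bme = np.asarray(log_bme, dtype=float)
        # Shift by the maximum before exponentiating to avoid overflow
        w = np.exp(log_bme - np.max(log_bme))
        return w / w.sum()

    # Example with made-up log-BME values for three competing models
    print(model_weights_from_log_bme([-10.2, -11.0, -13.5]))
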
diff --git a/tests/test_Discrepancy.py b/tests/test_Discrepancy.py
index 7fb948d90..c46e0a137 100644
--- a/tests/test_Discrepancy.py
+++ b/tests/test_Discrepancy.py
@@ -36,8 +36,22 @@ def test_get_sample() -> None:
     """
     Get discrepancy sample
     """
-    disc = Discrepancy()
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    disc = Discrepancy(InputDisc = inp)
     with pytest.raises(AttributeError) as excinfo:
         disc.get_sample(2)
     assert str(excinfo.value) == 'Cannot create new samples, please provide input distributions'
-    
\ No newline at end of file
+    
+    
+    
+    
+if __name__ == '__main__':
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    disc = Discrepancy(InputDisc = inp)
+    disc.get_sample(2)
\ No newline at end of file
diff --git a/tests/test_ExpDesign.py b/tests/test_ExpDesign.py
index 68255b338..42f87663c 100644
--- a/tests/test_ExpDesign.py
+++ b/tests/test_ExpDesign.py
@@ -131,47 +131,6 @@ def test_random_sampler() -> None:
     exp = ExpDesigns(inp)
     exp.random_sampler(4)
     
-def test_random_sampler_largedatanoJDist() -> None:
-    """
-    Sample randomly, init_param_space implicitly, more samples wanted than given, no JDist available
-    """
-    x = np.random.uniform(0,1,1000)
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].input_data = x
-    exp = ExpDesigns(inp)
-    with pytest.raises(AttributeError) as excinfo:
-        exp.random_sampler(100000) 
-    assert str(excinfo.value) == 'Sampling cannot proceed, build ExpDesign with max_deg != 0 to create JDist!'
-    
-def test_random_sampler_largedataJDist0() -> None:
-    """
-    Sample randomly, init_param_space implicitly, more samples wanted than given, 
-    JDist available, priors given via samples
-    """
-    x = np.random.uniform(0,1,1000)
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].input_data = x
-    exp = ExpDesigns(inp)
-    exp.init_param_space(max_deg = 1)
-    exp.random_sampler(100000) 
-    
-def test_random_sampler_largedataJDist1() -> None:
-    """
-    Sample randomly, init_param_space implicitly, more samples wanted than given, 
-    JDist available, prior distributions given
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0,1]
-    exp = ExpDesigns(inp)
-    exp.init_param_space(max_deg = 1)
-    exp.random_sampler(100000) 
-     
-        
-        
 def test_random_sampler_rawdata() -> None:
     """
     Sample randomly, init_param_space implicitly, has 2d raw data
@@ -379,7 +338,7 @@ def test_read_from_file_wrongcomp():
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp, sampling_method = 'user')
-    exp.hdf5_file = 'ExpDesign_testfile.hdf5'
+    exp.hdf5_file = 'tests/ExpDesign_testfile.hdf5'
     with pytest.raises(KeyError) as excinfo:
         exp.read_from_file(['Out'])
     assert str(excinfo.value) == "'Unable to open object (component not found)'"
@@ -393,5 +352,13 @@ def test_read_from_file():
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = ExpDesigns(inp, sampling_method = 'user')
-    exp.hdf5_file = 'ExpDesign_testfile.hdf5'
+    exp.hdf5_file = 'tests/ExpDesign_testfile.hdf5'
     exp.read_from_file(['Z'])
+    
+if __name__ == '__main__':
+    x = np.random.uniform(0,1,1000)
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].input_data = x
+    exp = ExpDesigns(inp, sampling_method = 'PCM')
+    exp.generate_ED(4)
\ No newline at end of file
diff --git a/tests/test_Input.py b/tests/test_Input.py
index 41c0e5ab8..84b9b239c 100644
--- a/tests/test_Input.py
+++ b/tests/test_Input.py
@@ -9,10 +9,10 @@ Class Input:
 @author: Rebecca Kohlhaas
 """
 import sys
-
-from bayesvalidrox.surrogate_models.inputs import Input
-
 sys.path.append("src/")
+import pytest
+
+from bayesvalidrox.surrogate_models.inputs import Marginal, Input
 
 
 def test_addmarginals() -> None:
diff --git a/tests/test_InputSpace.py b/tests/test_InputSpace.py
index ae31f8e90..1b5a28fa3 100644
--- a/tests/test_InputSpace.py
+++ b/tests/test_InputSpace.py
@@ -10,16 +10,13 @@ Class InputSpace:
 
 """
 import sys
+sys.path.append("src/")
 import pytest
 import numpy as np
 
 from bayesvalidrox.surrogate_models.inputs import Input
 from bayesvalidrox.surrogate_models.input_space import InputSpace
 
-sys.path.append("src/")
-sys.path.append("../src/")
-
-
 #%% Test ExpDesign.check_valid_input
 
 def test_check_valid_input_hasmarg() -> None:
@@ -31,7 +28,6 @@ def test_check_valid_input_hasmarg() -> None:
         InputSpace(inp)
     assert str(excinfo.value) == 'Cannot build distributions if no marginals are given'
 
-
 def test_check_valid_input_haspriors() -> None:
     """
     Distribution not built if no distribution set for the marginals
@@ -40,43 +36,45 @@ def test_check_valid_input_haspriors() -> None:
     inp.add_marginals()
     with pytest.raises(AssertionError) as excinfo:
         InputSpace(inp)
-    assert str(excinfo.value) == 'Not all marginals were provided priors'
-
-
+    assert str(excinfo.value) ==  'Not all marginals were provided priors'
+    
 def test_check_valid_input_priorsmatch() -> None:
     """
     Distribution not built if dist types do not align
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
-    inp.Marginals[1].parameters = [0, 1]
+    inp.Marginals[1].parameters = [0,1]
     with pytest.raises(AssertionError) as excinfo:
         InputSpace(inp)
     assert str(excinfo.value) == 'Distributions cannot be built as the priors have different types'
 
-
 def test_check_valid_input_samples() -> None:
     """
     Design built correctly - samples
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     inp.add_marginals()
-    inp.Marginals[1].input_data = x + 2
-    InputSpace(inp)
-
+    inp.Marginals[1].input_data = x+2
+    try:
+        InputSpace(inp)
+    except AssertionError:
+        pytest.fail("ExpDesign raised AssertionError unexpectedly!")
+    # TODO: check for better options to assert that no error at all occurred
+    
 
 def test_check_valid_input_both() -> None:
     """
     Design no built - samples and dist type given
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -85,8 +83,7 @@ def test_check_valid_input_both() -> None:
         InputSpace(inp)
     assert str(excinfo.value) == 'Both samples and distribution type are given. Please choose only one.'
 
-
-# def test_check_valid_input_distnotok() -> None:
+#def test_check_valid_input_distnotok() -> None:
 #    """
 #    Design built incorrectly - dist types without parameters
 #    """
@@ -98,7 +95,7 @@ def test_check_valid_input_both() -> None:
 #    with pytest.raises(AssertionError) as excinfo:
 #        exp = ExpDesigns(inp)
 #    assert str(excinfo.value) == 'Some distributions do not have characteristic values'
-
+    
 def test_check_valid_input_distok() -> None:
     """
     Design built correctly - dist types
@@ -106,13 +103,16 @@ def test_check_valid_input_distok() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
-    inp.Marginals[1].parameters = [0, 1]
-    InputSpace(inp)
-
-
+    inp.Marginals[1].parameters = [0,1]
+    try:
+        InputSpace(inp)
+    except AssertionError:
+        pytest.fail("ExpDesign raised AssertionError unexpectedly!")
+    # TODO: check for better options to assert that no error at all occurred
+    
 def test_check_valid_input_noapc() -> None:
     """
     Design built correctly - no apc
@@ -120,12 +120,11 @@ def test_check_valid_input_noapc() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
-    inp.Marginals[1].parameters = [0, 1]
-    InputSpace(inp, meta_Model_type='gpe')
-
+    inp.Marginals[1].parameters = [0,1]
+    InputSpace(inp, meta_Model_type = 'gpe')
 
 #%% Test ExpDesign.build_polytypes
 def test_build_polytypes_normalerr() -> None:
@@ -141,7 +140,6 @@ def test_build_polytypes_normalerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_normal() -> None:
     """
     Build dist 'normal'
@@ -149,11 +147,11 @@ def test_build_polytypes_normal() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+    
+    
 def test_build_polytypes_uniferr() -> None:
     """
     Build dist 'unif' - too few params
@@ -167,7 +165,6 @@ def test_build_polytypes_uniferr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_unif() -> None:
     """
     Build dist 'unif'
@@ -175,11 +172,10 @@ def test_build_polytypes_unif() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'unif'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+    
 def test_build_polytypes_gammaerr() -> None:
     """
     Build dist 'gamma' - too few params
@@ -193,8 +189,6 @@ def test_build_polytypes_gammaerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
-# noinspection SpellCheckingInspection
 def test_build_polytypes_gamma() -> None:
     """
     Build dist 'gamma'
@@ -202,14 +196,12 @@ def test_build_polytypes_gamma() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'gamma'
-    inp.Marginals[0].parameters = [0, 1, 0]
+    inp.Marginals[0].parameters = [0,1,0]
     exp = InputSpace(inp)
     with pytest.raises(ValueError) as excinfo:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Parameter values are not valid, please set differently'
-
-
-# noinspection SpellCheckingInspection
+    
 def test_build_polytypes_betaerr() -> None:
     """
     Build dist 'beta' - too few params
@@ -223,7 +215,6 @@ def test_build_polytypes_betaerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_beta() -> None:
     """
     Build dist 'beta'
@@ -231,12 +222,11 @@ def test_build_polytypes_beta() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'beta'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
-# noinspection SpellCheckingInspection
+    
+        
 def test_build_polytypes_lognormerr() -> None:
     """
     Build dist 'lognorm' - too few params
@@ -250,7 +240,6 @@ def test_build_polytypes_lognormerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_lognorm() -> None:
     """
     Build dist 'lognorm'
@@ -258,11 +247,11 @@ def test_build_polytypes_lognorm() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'lognorm'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+    
+        
 def test_build_polytypes_exponerr() -> None:
     """
     Build dist 'expon' - too few params
@@ -276,7 +265,6 @@ def test_build_polytypes_exponerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_expon() -> None:
     """
     Build dist 'expon'
@@ -284,11 +272,11 @@ def test_build_polytypes_expon() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'expon'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+    
+        
 def test_build_polytypes_weibullerr() -> None:
     """
     Build dist 'weibull' - too few params
@@ -302,7 +290,6 @@ def test_build_polytypes_weibullerr() -> None:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'Distribution has too few parameters!'
 
-
 def test_build_polytypes_weibull() -> None:
     """
     Build dist 'weibull'
@@ -310,52 +297,50 @@ def test_build_polytypes_weibull() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'weibull'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
+    
 
 def test_build_polytypes_arbitrary() -> None:
     """
     Build poly 'arbitrary'
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+   
 def test_build_polytypes_rosenblatt() -> None:
     """
     Build dist with rosenblatt
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.build_polytypes(True)
-
-
+    
 def test_build_polytypes_samples() -> None:
     """
     Build dist from samples
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.build_polytypes(False)
-
-
+    
+    
 def test_build_polytypes_samples2d() -> None:
     """
     Build dist from samples - samples too high dim
     """
-    x = np.random.uniform(0, 1, (2, 1000))
+    x = np.random.uniform(0,1,(2,1000))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -363,22 +348,21 @@ def test_build_polytypes_samples2d() -> None:
     with pytest.raises(ValueError) as excinfo:
         exp.build_polytypes(False)
     assert str(excinfo.value) == 'The samples provided to the Marginals should be 1D only'
-
-
+    
+    
 #%% Test ExpDesign.init_param_space
 
 def test_init_param_space_nomaxdegsample() -> None:
     """
     Init param space without max_deg for given samples
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.init_param_space()
 
-
 def test_init_param_space_nomaxdegdist() -> None:
     """
     Init param space without max_deg for given dist
@@ -386,23 +370,21 @@ def test_init_param_space_nomaxdegdist() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'expon'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.init_param_space()
-
-
+     
 def test_init_param_space_maxdeg() -> None:
     """
     Init param space with max_deg for given samples
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-
-
+    
 def test_init_param_space_maxdegdist() -> None:
     """
     Init param space with max_deg for given dist (not uniform)
@@ -410,11 +392,10 @@ def test_init_param_space_maxdegdist() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'expon'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-
-
+     
 def test_init_param_space_maxdegdistunif() -> None:
     """
     Init param space with max_deg for given dist (uniform)
@@ -422,19 +403,20 @@ def test_init_param_space_maxdegdistunif() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'unif'
-    inp.Marginals[0].parameters = [0.5, 1, 2, 3]
+    inp.Marginals[0].parameters = [0.5,1,2,3]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-
-
+     
+    
+    
 #%% Test ExpDesign.transform
-
+    
 def test_transform_noparamspace() -> None:
     """
     Call transform without a built JDist
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (2, 1000))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(2,1000))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -442,13 +424,12 @@ def test_transform_noparamspace() -> None:
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(y)
     assert str(excinfo.value) == 'Call function init_param_space first to create JDist'
-
-
+      
 def test_transform_dimerrlow() -> None:
     """
     Call transform with too few dimensions
     """
-    x = np.random.uniform(0, 1, 1000)
+    x = np.random.uniform(0,1,1000)
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -457,14 +438,13 @@ def test_transform_dimerrlow() -> None:
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(x)
     assert str(excinfo.value) == 'X should have two dimensions'
-
-
+          
 def test_transform_dimerrhigh() -> None:
     """
     Call transform with too many dimensions
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (1, 1, 1000))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1,1,1000))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -473,14 +453,13 @@ def test_transform_dimerrhigh() -> None:
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(y)
     assert str(excinfo.value) == 'X should have two dimensions'
-
-
+    
 def test_transform_dimerr0() -> None:
     """
     Call transform with wrong X.shape[0]
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (2, 1000))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(2,1000))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
@@ -488,30 +467,27 @@ def test_transform_dimerr0() -> None:
     exp.init_param_space(max_deg=2)
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(y)
-    assert str(
-        excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj'
-
-
+    assert str(excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj'
+   
 def test_transform_paramspace() -> None:
     """
     Transform successfully
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (1000, 1))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
     exp.transform(y)
-
-
+  
 def test_transform_rosenblatt() -> None:
     """
     Transform with rosenblatt
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (1000, 1))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.Rosenblatt = True
     inp.add_marginals()
@@ -519,92 +495,86 @@ def test_transform_rosenblatt() -> None:
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
     exp.transform(y)
-
-
+  
 def test_transform_user() -> None:
     """
     Transform with method 'user'
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (1000, 1))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-    exp.transform(y, method='user')
-
-
-# noinspection SpellCheckingInspection
+    exp.transform(y, method = 'user')
+  
 def test_transform_rosenblattuser() -> None:
     """
     Transform with rosenblatt and method 'user'
     """
-    x = np.random.uniform(0, 1, 1000)
-    y = np.random.uniform(0, 1, (1000, 1))
+    x = np.random.uniform(0,1,1000)
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.Rosenblatt = True
     inp.add_marginals()
     inp.Marginals[0].input_data = x
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-    exp.transform(y, method='user')
-
-
+    exp.transform(y, method = 'user')
+  
 def test_transform_uniform() -> None:
     """
     Transform uniform dist
     """
-    y = np.random.uniform(0, 1, (1000, 1))
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'unif'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
     exp.transform(y)
-
-
+  
 def test_transform_norm() -> None:
     """
     Transform normal dist
     """
-    y = np.random.uniform(0, 1, (1000, 1))
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'norm'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
     exp.transform(y)
-
-
+  
 # TODO: what are these other params here???
 def test_transform_gammanoparam() -> None:
     """
     Transform gamma dist - no parameters
     """
-    y = np.random.uniform(0, 1, (1000, 1))
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'gamma'
-    inp.Marginals[0].parameters = [1, 1, 0]
+    inp.Marginals[0].parameters = [1,1,0]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
     with pytest.raises(AttributeError) as excinfo:
         exp.transform(y)
     assert str(excinfo.value) == 'Additional parameters have to be set for the gamma distribution!'
-
-
+  
 def test_transform_gammaparam() -> None:
     """
     Transform gamma dist - with parameters
     """
-    y = np.random.uniform(0, 1, (1000, 1))
+    y = np.random.uniform(0,1,(1000,1))
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'gamma'
-    inp.Marginals[0].parameters = [1, 1, 0]
+    inp.Marginals[0].parameters = [1,1,0]
     exp = InputSpace(inp)
     exp.init_param_space(max_deg=2)
-    exp.transform(y, params=[1, 1])
+    exp.transform(y, params = [1,1])
+  
\ No newline at end of file
diff --git a/tests/test_MCMC.py b/tests/test_MCMC.py
deleted file mode 100644
index 3485a615b..000000000
--- a/tests/test_MCMC.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Test the MCMC class of bayesvalidrox
-Tests are available for the following functions
-_check_ranges           - x
-gelman_rubin
-_iterative_scheme
-_my_ESS                 - x
-Class MCMC: 
-    run_sampler
-    log_prior
-    log_likelihood
-    log_posterior
-    eval_model
-    train_error_model
-    marginal_llk_emcee
-"""
-import sys
-import pandas as pd
-import numpy as np
-
-from bayesvalidrox.surrogate_models.inputs import Input
-from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
-from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
-from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
-from bayesvalidrox.surrogate_models.engine import Engine
-from bayesvalidrox.bayes_inference.discrepancy import Discrepancy
-from bayesvalidrox.bayes_inference.mcmc import MCMC
-from bayesvalidrox.bayes_inference.bayes_inference import BayesInference
-from bayesvalidrox.bayes_inference.mcmc import _check_ranges, gelman_rubin
-
-sys.path.append("src/")
-sys.path.append("../src/")
-
-
-#%% Test MCMC init
-
-def test_MCMC() -> None:
-    """
-    Construct an MCMC object
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    disc = Discrepancy('')
-    disc.type = 'Gaussian'
-    disc.parameters = (obsData * 0.15) ** 2
-    disc.opt_sigma = 'B'
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = disc
-    bi.inference_method = 'mcmc'
-    bi.setup_inference()
-    MCMC(bi)
-
-
-#%% Test run_sampler
-
-def test_run_sampler() -> None:
-    """
-    Run short MCMC
-
-    Returns
-    -------
-    None
-
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.x_values = np.array([0])
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-    engine = Engine(mm, mod, expdes)
-
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    disc = Discrepancy('')
-    disc.type = 'Gaussian'
-    disc.parameters = (obsData * 0.15) ** 2
-    disc.opt_sigma = 'B'
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = disc
-    bi.inference_method = 'mcmc'
-    bi.setup_inference()
-    total_sigma2s = {'Z': np.array([0.15])}
-    mcmc = MCMC(bi)
-    mcmc.nburn = 10
-    mcmc.nsteps = 50
-    mcmc.run_sampler(mod.observations, total_sigma2s)
-
-
-#%% Test log_prior
-
-#%% Test log_likelihood
-
-#%% Test log_posterior
-
-#%% Test eval_model
-
-#%% Test train_error_model
-
-#%% Test gelman_rubin
-
-def test_gelman_rubin() -> None:
-    """
-    Calculate gelman-rubin
-    """
-    chain = [[[1], [2]]]
-    gelman_rubin(chain)
-
-
-def test_gelman_rubin_returnvar() -> None:
-    """
-    Calculate gelman-rubin returning var
-    """
-    chain = [[[1], [2]]]
-    gelman_rubin(chain, return_var=True)
-
-
-#%% Test marginal_llk_emcee
-
-#%% Test _check_ranges
-
-def test_check_ranges() -> None:
-    """
-    Check to see if theta lies in expected ranges
-    """
-    theta = [0.5, 1.2]
-    ranges = [[0, 1], [1, 2]]
-    assert _check_ranges(theta, ranges) is True
-
-
-def test_check_ranges_inv() -> None:
-    """
-    Check to see if theta lies not in expected ranges
-    """
-    theta = [1.5, 1.2]
-    ranges = [[0, 1], [1, 2]]
-    assert _check_ranges(theta, ranges) is False
-
-
-#%% Main
-
-if __name__ == '__main__':
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-
-    expdes = ExpDesigns(inp)
-    expdes.n_init_samples = 2
-    expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    # expdes.x_values = np.array([0]) #  Error in plots if this is not available
-
-    mm = MetaModel(inp)
-    mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
-
-    mod = PL()
-    mod.observations = {'Z': np.array([0.45]), 'x_values': np.array([0])}
-    mod.Output.names = ['Z']
-
-    engine = Engine(mm, mod, expdes)
-
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    obsData = pd.DataFrame(mod.observations, columns=mod.Output.names)
-    disc = Discrepancy('')
-    disc.type = 'Gaussian'
-    disc.parameters = (obsData * 0.15) ** 2
-    disc.opt_sigma = 'B'
-
-    bi = BayesInference(engine)
-    bi.Discrepancy = disc
-    bi.inference_method = 'mcmc'
-    bi.setup_inference()
-
-    # chain = [[[1],[2]]]
-    total_sigma2s = {'Z': np.array([0.15])}
-    mcmc = MCMC(bi)
-    mcmc.nsteps = 50
-    mcmc.nburn = 10
-    mcmc.run_sampler(mod.observations, total_sigma2s)
-    # mcmc.gelman_rubin(chain)
-
-    chain = [[[1], [2]]]
-    gelman_rubin(chain, return_var=True)
diff --git a/tests/test_MetaModel.py b/tests/test_MetaModel.py
index a3f9b19d3..b5095fa34 100644
--- a/tests/test_MetaModel.py
+++ b/tests/test_MetaModel.py
@@ -26,16 +26,13 @@ Class MetaModel:
     
 """
 import numpy as np
-import pytest
 import sys
+sys.path.append("src/")
+import pytest
 
 from bayesvalidrox.surrogate_models.inputs import Input
 from bayesvalidrox.surrogate_models.input_space import InputSpace
-from bayesvalidrox.surrogate_models.surrogate_models import MetaModel, corr_loocv_error, create_psi
-from bayesvalidrox.surrogate_models.surrogate_models import gaussian_process_emulator
-
-sys.path.append("src/")
-
+from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
 
 #%% Test MetaMod constructor on its own
 
@@ -46,10 +43,9 @@ def test_metamod() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     MetaModel(inp)
-
-
+    
 #%% Test MetaModel.build_metamodel
 
 def test_build_metamodel_nosamples() -> None:
@@ -59,12 +55,12 @@ def test_build_metamodel_nosamples() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     with pytest.raises(AttributeError) as excinfo:
         mm.build_metamodel()
     assert str(excinfo.value) == 'Please provide samples to the metamodel before building it.'
-
+    
 
 def test_build_metamodel() -> None:
     """
@@ -73,12 +69,11 @@ def test_build_metamodel() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.CollocationPoints = np.array([[0.2], [0.8]])
+    mm.CollocationPoints = [[0.2],[0.8]]
     mm.build_metamodel()
-
-
+    
 def test_build_metamodel_ninitsamples() -> None:
     """
     Build MetaModel with n_init_samples
@@ -86,12 +81,11 @@ def test_build_metamodel_ninitsamples() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.CollocationPoints = np.array([[0.2], [0.8]])
-    mm.build_metamodel(n_init_samples=2)
-
-
+    mm.CollocationPoints = [[0.2],[0.8]]
+    mm.build_metamodel(n_init_samples = 2)
+    
 def test_build_metamodel_gpe() -> None:
     """
     Build MetaModel gpe
@@ -99,13 +93,13 @@ def test_build_metamodel_gpe() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.meta_model_type = 'gpe'
-    mm.CollocationPoints = np.array([[0.2], [0.8]])
+    mm.CollocationPoints = [[0.2],[0.8]]
     mm.build_metamodel()
-
-
+    
+    
 def test_build_metamodel_coldimerr() -> None:
     """
     Build MetaModel with wrong shape collocation samples
@@ -113,31 +107,29 @@ def test_build_metamodel_coldimerr() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.CollocationPoints = [[0.2, 0.8]]
+    mm.CollocationPoints = [[0.2,0.8]]
     with pytest.raises(AttributeError) as excinfo:
         mm.build_metamodel()
-    assert str(
-        excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj'
+    assert str(excinfo.value) == 'The given samples do not match the given number of priors. The samples should be a 2D array of size (#samples, #priors)'
 
 
 #%% Test MetaMod.generate_polynomials
 
-def test_generate_polynomials_noexp() -> None:
+def test_generate_polynomials_noExp() -> None:
     """
     Generate polynomials without ExpDeg
     """
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     with pytest.raises(AttributeError) as excinfo:
         mm.generate_polynomials()
     assert str(excinfo.value) == 'Generate or add InputSpace before generating polynomials'
-
-
+    
 def test_generate_polynomials_nodeg() -> None:
     """
     Generate polynomials without max_deg
@@ -145,21 +137,21 @@ def test_generate_polynomials_nodeg() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-
+    
     # Setup
     mm.InputSpace = InputSpace(inp)
     mm.InputSpace.n_init_samples = 2
     mm.InputSpace.init_param_space(np.max(mm.pce_deg))
     mm.ndim = mm.InputSpace.ndim
     mm.n_params = len(mm.input_obj.Marginals)
-
+        
     # Generate
     with pytest.raises(AttributeError) as excinfo:
         mm.generate_polynomials()
     assert str(excinfo.value) == 'MetaModel cannot generate polynomials in the given scenario!'
-
+    
 
 def test_generate_polynomials_deg() -> None:
     """
@@ -168,100 +160,96 @@ def test_generate_polynomials_deg() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-
+    
     # Setup
     mm.InputSpace = InputSpace(inp)
     mm.InputSpace.n_init_samples = 2
     mm.InputSpace.init_param_space(np.max(mm.pce_deg))
     mm.ndim = mm.InputSpace.ndim
     mm.n_params = len(mm.input_obj.Marginals)
-
+        
     # Generate
     mm.generate_polynomials(4)
-
-
+    
+    
 #%% Test MetaMod.add_InputSpace
 
-def test_add_inputspace() -> None:
+def test_add_InputSpace() -> None:
     """
     Add InputSpace in MetaModel
     """
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.add_InputSpace()
-
-
+    
 #%% Test MetaModel.fit
 # Faster without these
-def test_fit() -> None:
-    """
-    Fit MetaModel
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mm = MetaModel(inp)
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
-
-
-def test_fit_parallel() -> None:
-    """
-    Fit MetaModel in parallel
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mm = MetaModel(inp)
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]}, parallel=True)
-
-
-def test_fit_verbose() -> None:
-    """
-    Fit MetaModel verbose
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mm = MetaModel(inp)
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]}, verbose=True)
-
-
-def test_fit_pca() -> None:
-    """
-    Fit MetaModel verbose and with pca
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mm = MetaModel(inp)
-    mm.dim_red_method = 'pca'
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]}, verbose=True)
-
-
-def test_fit_gpe() -> None:
-    """
-    Fit MetaModel
-    """
-    inp = Input()
-    inp.add_marginals()
-    inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    mm = MetaModel(inp)
-    mm.meta_model_type = 'gpe'
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
-
-
+if 0:
+    def test_fit() -> None:
+        """
+        Fit MetaModel 
+        """
+        inp = Input()
+        inp.add_marginals()
+        inp.Marginals[0].dist_type = 'normal'
+        inp.Marginals[0].parameters = [0,1]
+        mm = MetaModel(inp)
+        mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
+        
+    def test_fit_parallel() -> None:
+        """
+        Fit MetaModel in parallel
+        """
+        inp = Input()
+        inp.add_marginals()
+        inp.Marginals[0].dist_type = 'normal'
+        inp.Marginals[0].parameters = [0,1]
+        mm = MetaModel(inp)
+        mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]}, parallel = True)
+        
+    def test_fit_verbose() -> None:
+        """
+        Fit MetaModel verbose
+        """
+        inp = Input()
+        inp.add_marginals()
+        inp.Marginals[0].dist_type = 'normal'
+        inp.Marginals[0].parameters = [0,1]
+        mm = MetaModel(inp)
+        mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]}, verbose = True)
+        
+            
+    def test_fit_pca() -> None:
+        """
+        Fit MetaModel verbose and with pca
+        """
+        inp = Input()
+        inp.add_marginals()
+        inp.Marginals[0].dist_type = 'normal'
+        inp.Marginals[0].parameters = [0,1]
+        mm = MetaModel(inp)
+        mm.dim_red_method = 'pca'
+        mm.fit( [[0.2],[0.8]], {'Z':[[0.4,0.4],[0.5,0.6]]}, verbose = True)
+        
+    def test_fit_gpe() -> None:
+        """
+        Fit MetaModel 
+        """
+        inp = Input()
+        inp.add_marginals()
+        inp.Marginals[0].dist_type = 'normal'
+        inp.Marginals[0].parameters = [0,1]
+        mm = MetaModel(inp)
+        mm.meta_model_type = 'gpe'
+        mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
+        
 #%% Test MetaModel.create_psi
-
+        
 def test_create_psi() -> None:
     """
     Create psi-matrix
@@ -269,16 +257,16 @@ def test_create_psi() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.2], [0.8]])
+    samples = np.array([[0.2],[0.8]])
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    create_psi(BasisIndices, univ_bas)
-
-
+    mm.create_psi(BasisIndices, univ_bas)
+    
+    
 #%% Test MetaModel.regression
 
 def test_regression() -> None:
@@ -288,20 +276,19 @@ def test_regression() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
     mm.regression(samples, outputs, psi)
-
-
+    
 def test_regression_ols() -> None:
     """
     Regression: ols
@@ -309,20 +296,19 @@ def test_regression_ols() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ols')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ols')
+    
 def test_regression_olssparse() -> None:
     """
     Regression: ols and sparse
@@ -330,20 +316,19 @@ def test_regression_olssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ols', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ols', sparsity = True)
+    
 def test_regression_ard() -> None:
     """
     Regression: ard
@@ -351,20 +336,19 @@ def test_regression_ard() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.2], [0.8]])
-    outputs = np.array([0.4, 0.5])
-
+    samples = np.array([[0.2],[0.8]])
+    outputs = np.array([0.4,0.5])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ard')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ard')
+    
 def test_regression_ardssparse() -> None:
     """
     Regression: ard and sparse
@@ -372,20 +356,19 @@ def test_regression_ardssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.2], [0.8]])
-    outputs = np.array([0.4, 0.5])
-
+    samples = np.array([[0.2],[0.8]])
+    outputs = np.array([0.4,0.5])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ard', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ard', sparsity = True)
+    
 def test_regression_fastard() -> None:
     """
     Regression: fastard
@@ -393,20 +376,19 @@ def test_regression_fastard() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='fastard')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'fastard')
+    
 def test_regression_fastardssparse() -> None:
     """
     Regression: fastard and sparse
@@ -414,20 +396,19 @@ def test_regression_fastardssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='fastard', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'fastard', sparsity = True)
+    
 def test_regression_brr() -> None:
     """
     Regression: brr
@@ -435,20 +416,19 @@ def test_regression_brr() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='brr')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'brr')
+    
 def test_regression_brrssparse() -> None:
     """
     Regression: brr and sparse
@@ -456,20 +436,19 @@ def test_regression_brrssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='brr', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'brr', sparsity = True)
+        
 def test_regression_bcs() -> None:
     """
     Regression: bcs
@@ -477,20 +456,19 @@ def test_regression_bcs() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],[0.8],[0.9]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9])
     mm.pce_deg = 3
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(mm.pce_deg)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='bcs')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'bcs')
+    
 def test_regression_bcsssparse() -> None:
     """
     Regression: bcs and sparse
@@ -498,20 +476,20 @@ def test_regression_bcsssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],[0.8],[0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='bcs', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'bcs', sparsity = True)
+    
+    
 def test_regression_lars() -> None:
     """
     Regression: lars
@@ -519,20 +497,19 @@ def test_regression_lars() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],[0.8],[0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='lars')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'lars')
+    
 def test_regression_larsssparse() -> None:
     """
     Regression: lars and sparse
@@ -540,20 +517,19 @@ def test_regression_larsssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],[0.8],[0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='lars', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'lars', sparsity = True)
+        
 def test_regression_sgdr() -> None:
     """
     Regression: sgdr
@@ -561,20 +537,19 @@ def test_regression_sgdr() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='sgdr')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'sgdr')
+    
 def test_regression_sgdrssparse() -> None:
     """
     Regression: sgdr and sparse
@@ -582,20 +557,20 @@ def test_regression_sgdrssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='sgdr', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'sgdr', sparsity = True)
+        
+    
 def test_regression_omp() -> None:
     """
     Regression: omp
@@ -603,20 +578,19 @@ def test_regression_omp() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='omp')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'omp')
+    
 def test_regression_ompssparse() -> None:
     """
     Regression: omp and sparse
@@ -624,20 +598,20 @@ def test_regression_ompssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='omp', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'omp', sparsity = True)
+        
+    
 def test_regression_vbl() -> None:
     """
     Regression: vbl
@@ -645,20 +619,19 @@ def test_regression_vbl() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='vbl')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'vbl')
+    
 def test_regression_vblssparse() -> None:
     """
     Regression: vbl and sparse
@@ -666,20 +639,19 @@ def test_regression_vblssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='vbl', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'vbl', sparsity = True)
+        
 def test_regression_ebl() -> None:
     """
     Regression: ebl
@@ -687,20 +659,19 @@ def test_regression_ebl() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ebl')
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ebl')
+    
 def test_regression_eblssparse() -> None:
     """
     Regression: ebl and sparse
@@ -708,20 +679,21 @@ def test_regression_eblssparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.5])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    mm.regression(samples, outputs, psi, reg_method='ebl', sparsity=True)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi, reg_method = 'ebl', sparsity = True)
+        
+    
+    
 #%% Test Model.update_pce_coeffs
 
 # TODO: very linked to the actual training...
@@ -735,16 +707,15 @@ def test_univ_basis_vals() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.2], [0.8]])
+    samples = np.array([[0.2],[0.8]])
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     mm.univ_basis_vals(samples)
-
-
+    
 #%% Test MetaModel.adaptive_regression
-
+     
 def test_adaptive_regression_fewsamples() -> None:
     """
     Adaptive regression, no specific method, too few samples given
@@ -752,25 +723,22 @@ def test_adaptive_regression_fewsamples() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.8])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
 
     # Evaluate the univariate polynomials on InputSpace
     if mm.meta_model_type.lower() != 'gpe':
-        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
-
+       mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    
     with pytest.raises(AttributeError) as excinfo:
-        mm.adaptive_regression(outputs, 0)
-    assert str(
-        excinfo.value) == ('There are too few samples for the corrected loo-cv error. Fit surrogate on at least as '
-                           'many samples as parameters to use this')
-
-
+        mm.adaptive_regression(samples, outputs, 0)
+    assert str(excinfo.value) == 'There are too few samples for the corrected loo-cv error. Fit surrogate on at least as many samples as parameters to use this'
+        
 def test_adaptive_regression() -> None:
     """
     Adaptive regression, no specific method
@@ -778,20 +746,19 @@ def test_adaptive_regression() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1]])
-    outputs = np.array([0.0, 0.1])
-
+    samples = np.array([[0.0],[0.1]])
+    outputs = np.array([0.0,0.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
-
+    mm.build_metamodel(n_init_samples = 2)
+    
     # Evaluate the univariate polynomials on InputSpace
     if mm.meta_model_type.lower() != 'gpe':
-        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
-    mm.adaptive_regression(outputs, 0)
-
-
+       mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    mm.adaptive_regression(samples, outputs, 0)
+            
 def test_adaptive_regression_verbose() -> None:
     """
     Adaptive regression, no specific method, verbose output
@@ -799,20 +766,19 @@ def test_adaptive_regression_verbose() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1]])
-    outputs = np.array([0.0, 0.1])
-
+    samples = np.array([[0.0],[0.1]])
+    outputs = np.array([0.0,0.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
-
+    mm.build_metamodel(n_init_samples = 2)
+    
     # Evaluate the univariate polynomials on InputSpace
     if mm.meta_model_type.lower() != 'gpe':
-        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
-    mm.adaptive_regression(outputs, 0, True)
-
-
+       mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    mm.adaptive_regression(samples, outputs, 0, True)
+        
 def test_adaptive_regression_ols() -> None:
     """
     Adaptive regression, ols
@@ -820,22 +786,21 @@ def test_adaptive_regression_ols() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8],
-                        [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],[0.8],
+                        [0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
-
+    mm.build_metamodel(n_init_samples = 2)
+    
     # Evaluate the univariate polynomials on InputSpace
     if mm.meta_model_type.lower() != 'gpe':
-        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+       mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
     mm.pce_reg_method = 'ols'
-    mm.adaptive_regression(outputs, 0)
-
-
+    mm.adaptive_regression(samples, outputs, 0)
+    
 #%% Test MetaModel.corr_loocv_error
 
 def test_corr_loocv_error_nosparse() -> None:
@@ -845,23 +810,22 @@ def test_corr_loocv_error_nosparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7],
-                        [0.8], [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],
+                        [0.8],[0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    outs = mm.regression(samples, outputs, psi, reg_method='ebl')
-    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
-                     outputs)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    outs = mm.regression(samples, outputs, psi, reg_method = 'ebl')
+    mm.corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                                          outputs)
+        
 def test_corr_loocv_error_singley() -> None:
     """
     Corrected loocv error
@@ -869,22 +833,21 @@ def test_corr_loocv_error_singley() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     samples = np.array([[0.2]])
     outputs = np.array([0.1])
-
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    outs = mm.regression(samples, outputs, psi, reg_method='ols')
-    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
-                     outputs)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    outs = mm.regression(samples, outputs, psi, reg_method = 'ols')
+    mm.corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                                          outputs)
+        
 def test_corr_loocv_error_sparse() -> None:
     """
     Corrected loocv error from sparse results
@@ -892,26 +855,25 @@ def test_corr_loocv_error_sparse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7],
-                        [0.8], [0.9], [1.0]])
-    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
-
+    samples = np.array([[0.0],[0.1],[0.2],[0.3],[0.4],[0.5],[0.6],[0.7],
+                        [0.8],[0.9],[1.0]])
+    outputs = np.array([0.0,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1.1])
+    
     mm.CollocationPoints = samples
-    mm.build_metamodel(n_init_samples=2)
+    mm.build_metamodel(n_init_samples = 2)
     BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
     univ_bas = mm.univ_basis_vals(samples)
-    psi = create_psi(BasisIndices, univ_bas)
-
-    outs = mm.regression(samples, outputs, psi, reg_method='ebl',
-                         sparsity=True)
-    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
-                     outputs)
-
-
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    outs = mm.regression(samples, outputs, psi, reg_method = 'ebl',
+                         sparsity = True)
+    mm.corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                                          outputs)
+    
 #%% Test MetaModel.pca_transformation
-
+   
 def test_pca_transformation() -> None:
     """
     Apply PCA
@@ -919,12 +881,11 @@ def test_pca_transformation() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    outputs = np.array([[0.4,0.4],[0.5,0.6]])
     mm.pca_transformation(outputs)
 
-
 def test_pca_transformation_verbose() -> None:
     """
     Apply PCA verbose
@@ -932,12 +893,11 @@ def test_pca_transformation_verbose() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    outputs = np.array([[0.4,0.4],[0.5,0.6]])
     mm.pca_transformation(outputs, True)
-
-
+    
 def test_pca_transformation_varcomp() -> None:
     """
     Apply PCA with set var_pca_threshold
@@ -945,13 +905,12 @@ def test_pca_transformation_varcomp() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    outputs = np.array([[0.4,0.4],[0.5,0.6]])
     mm.var_pca_threshold = 1
     mm.pca_transformation(outputs)
-
-
+    
 def test_pca_transformation_ncomp() -> None:
     """
     Apply PCA with set n_pca_components
@@ -959,9 +918,9 @@ def test_pca_transformation_ncomp() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    outputs = np.array([[0.4,0.4],[0.5,0.6]])
     mm.n_pca_components = 1
     mm.pca_transformation(outputs)
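The four PCA tests above differ only in which attribute is set before calling pca_transformation. As a one-place reference, here is a minimal sketch that collects those knobs, reusing only names that appear in the tests; the MetaModel import path is an assumption, since it is not shown in this hunk.

    import numpy as np
    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel  # assumed import path

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    mm = MetaModel(inp)
    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])

    mm.pca_transformation(outputs)        # default settings
    mm.pca_transformation(outputs, True)  # verbose variant
    mm.var_pca_threshold = 1              # threshold-based variant
    mm.pca_transformation(outputs)
    mm.n_pca_components = 1               # fixed number of components
    mm.pca_transformation(outputs)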
 
@@ -975,9 +934,9 @@ def test_gaussian_process_emulator() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5])
-
+    inp.Marginals[0].parameters = [0,1]
+    mm = MetaModel(inp)
+    mm.gaussian_process_emulator( [[0.2],[0.8]], [0.4,0.5])
 
 def test_gaussian_process_emulator_nug() -> None:
     """
@@ -986,9 +945,9 @@ def test_gaussian_process_emulator_nug() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], nug_term=1.0)
-
+    inp.Marginals[0].parameters = [0,1]
+    mm = MetaModel(inp)
+    mm.gaussian_process_emulator( [[0.2],[0.8]], [0.4,0.5],nug_term=1.0)
 
 def test_gaussian_process_emulator_autosel() -> None:
     """
@@ -997,21 +956,21 @@ def test_gaussian_process_emulator_autosel() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], autoSelect=True)
-
+    inp.Marginals[0].parameters = [0,1]
+    mm = MetaModel(inp)
+    mm.gaussian_process_emulator( [[0.2],[0.8]], [0.4,0.5],autoSelect=True)
 
-def test_gaussian_process_emulator_varidx() -> None:
+def test_gaussian_process_emulator_varIdx() -> None:
     """
     Create GPE with var_idx
     """
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
-    gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], varIdx=1)
-
-
+    inp.Marginals[0].parameters = [0,1]
+    mm = MetaModel(inp)
+    mm.gaussian_process_emulator( [[0.2],[0.8]], [0.4,0.5],varIdx=1)
+    
 #%% Test MetaModel.eval_metamodel
 
 def test_eval_metamodel() -> None:
@@ -1021,13 +980,12 @@ def test_eval_metamodel() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.out_names = ['Z']
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
     mm.eval_metamodel([[0.4]])
 
-
 def test_eval_metamodel_normalboots() -> None:
     """
     Eval trained MetaModel with normal bootstrap
@@ -1035,14 +993,13 @@ def test_eval_metamodel_normalboots() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.bootstrap_method = 'normal'
     mm.out_names = ['Z']
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
     mm.eval_metamodel([[0.4]])
 
-
 def test_eval_metamodel_highnormalboots() -> None:
     """
     Eval trained MetaModel with higher bootstrap-itrs
@@ -1050,14 +1007,13 @@ def test_eval_metamodel_highnormalboots() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.n_bootstrap_itrs = 2
     mm.out_names = ['Z']
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
     mm.eval_metamodel([[0.4]])
 
-
 def test_eval_metamodel_gpe() -> None:
     """
     Eval trained MetaModel - gpe
@@ -1065,14 +1021,14 @@ def test_eval_metamodel_gpe() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.meta_model_type = 'gpe'
     mm.out_names = ['Z']
-    mm.fit([[0.2], [0.8]], {'Z': np.array([[0.4], [0.5]])})
+    mm.fit( [[0.2],[0.8]], {'Z':np.array([[0.4],[0.5]])})
     mm.eval_metamodel([[0.4]])
 
-
+ 
 def test_eval_metamodel_pca() -> None:
     """
     Eval trained MetaModel with pca
@@ -1080,14 +1036,13 @@ def test_eval_metamodel_pca() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.dim_red_method = 'pca'
     mm.out_names = ['Z']
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4,0.4],[0.5,0.6]]})
     mm.eval_metamodel([[0.4]])
-
-
+       
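The eval_metamodel tests above all follow the same two steps, fitting on a small design and evaluating at new points, with one configuration attribute changed per test. A condensed sketch of that shared pattern, using only attribute names taken from the tests; the MetaModel import path is assumed.

    import numpy as np
    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel  # assumed import path

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    mm = MetaModel(inp)
    mm.out_names = ['Z']
    # optional switches, each exercised by one of the tests above:
    # mm.bootstrap_method = 'normal'   # normal bootstrap
    # mm.n_bootstrap_itrs = 2          # more bootstrap iterations
    # mm.meta_model_type = 'gpe'       # GPE instead of the default PCE
    # mm.dim_red_method = 'pca'        # PCA on multi-column outputs
    mm.fit([[0.2], [0.8]], {'Z': np.array([[0.4], [0.5]])})
    y_hat, y_std = mm.eval_metamodel([[0.4]])  # two return values, as unpacked in the engine tests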
 #%% Test MetaModel.create_model_error
 # TODO: move model out of this function
 
@@ -1102,13 +1057,12 @@ def test_auto_vivification() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.auto_vivification()
-
-
+    
 #%% Test MetaModel.copy_meta_model_opts
-
+    
 def test_copy_meta_model_opts() -> None:
     """
     Copy the metamodel with just some stats
@@ -1116,12 +1070,11 @@ def test_copy_meta_model_opts() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.add_InputSpace()
     mm.copy_meta_model_opts()
-
-
+    
 #%% Test MetaModel.__select_degree
 
 #%% Test Engine._compute_pce_moments
@@ -1133,12 +1086,11 @@ def test__compute_pce_moments() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4], [0.5]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4],[0.5]]})
     mm._compute_pce_moments()
 
-
 def test__compute_pce_moments_pca() -> None:
     """
     Compute moments of a pce-surrogate with pca
@@ -1146,13 +1098,12 @@ def test__compute_pce_moments_pca() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.dim_red_method = 'pca'
-    mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]})
+    mm.fit( [[0.2],[0.8]], {'Z':[[0.4,0.4],[0.5,0.6]]})
     mm._compute_pce_moments()
 
-
 def test__compute_pce_moments_gpe() -> None:
     """
     Compute moments of a gpe-surrogate
@@ -1160,13 +1111,30 @@ def test__compute_pce_moments_gpe() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     mm.meta_model_type = 'gpe'
     with pytest.raises(AttributeError) as excinfo:
         mm._compute_pce_moments()
     assert str(excinfo.value) == 'Moments can only be computed for pce-type surrogates'
-
-
+    
 #%% Test MetaModel.update_metamodel
-# TODO: taken from engine
+#TODO: taken from engine
+
+if __name__ == '__main__':
+    
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0,1]
+    mm = MetaModel(inp)
+    samples = np.array([[0.2]])
+    outputs = np.array([0.5])
+    
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples = 2)
+    BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
+    univ_bas = mm.univ_basis_vals(samples)
+    psi = mm.create_psi(BasisIndices, univ_bas)
+    
+    mm.regression(samples, outputs, psi)
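Taken together, the regression and corr_loocv_error tests (and the __main__ block directly above) rely on one fixed call order when a PCE surrogate is assembled by hand. The sketch below spells that order out in one place; it repeats only calls that appear in the reverted test code, and the MetaModel import path is an assumption because it is not visible in this hunk.

    import numpy as np
    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel  # assumed import path

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    mm = MetaModel(inp)
    samples = np.linspace(0.0, 1.0, 11).reshape(-1, 1)   # same training grid as the tests
    outputs = np.append(np.linspace(0.0, 0.9, 10), 1.1)  # same outputs as the tests

    # 1) register the training points and set up the basis bookkeeping
    mm.CollocationPoints = samples
    mm.build_metamodel(n_init_samples=2)

    # 2) fetch the basis for degree 1 / q-norm 1.0 and evaluate it at the samples
    basis_indices = mm.allBasisIndices[str(1)][str(1.0)]
    univ_vals = mm.univ_basis_vals(samples)
    psi = mm.create_psi(basis_indices, univ_vals)

    # 3) regress and estimate the corrected LOO error
    #    (the tests also exercise reg_method='ebl' and sparsity=True)
    outs = mm.regression(samples, outputs, psi, reg_method='ols')
    mm.corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'], outputs)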
diff --git a/tests/test_Engine.py b/tests/test_engine.py
similarity index 56%
rename from tests/test_Engine.py
rename to tests/test_engine.py
index 6b03a2623..72dabd466 100644
--- a/tests/test_Engine.py
+++ b/tests/test_engine.py
@@ -31,7 +31,10 @@ Engine:
 import math
 import numpy as np
 import pandas as pd
+
 import sys
+sys.path.append("src/")
+#import pytest
 
 from bayesvalidrox.surrogate_models.inputs import Input
 from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
@@ -40,8 +43,6 @@ from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
 from bayesvalidrox.surrogate_models.engine import Engine
 from bayesvalidrox.surrogate_models.engine import hellinger_distance, logpdf, subdomain
 
-sys.path.append("src/")
-
 
 #%% Test Engine constructor
 
@@ -53,13 +54,12 @@ def test_engine() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mod = PL()
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     Engine(mm, mod, expdes)
 
-
 #%% Test Engine.start_engine
 
 def test_start_engine() -> None:
@@ -69,7 +69,7 @@ def test_start_engine() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mod = PL()
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
@@ -89,9 +89,9 @@ def test__error_Mean_Std() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0], [1.0]], {'Z': [[0.5], [0.5]]})
+    mm.fit([[0.0],[1.0]], {'Z':[[0.5],[0.5]]})
     expdes = ExpDesigns(inp)
     mod = PL()
     mod.mc_reference['mean'] = [0.5]
@@ -100,9 +100,8 @@ def test__error_Mean_Std() -> None:
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
     mean, std = engine._error_Mean_Std()
-    assert mean < 0.01 and std < 0.01
-
-
+    assert mean < 0.01 and std <0.01
+    
 #%% Test Engine._validError
 
 def test__validError() -> None:
@@ -112,20 +111,19 @@ def test__validError() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0], [1.0]], {'Z': [[0.5], [0.5]]})
+    mm.fit([[0.0],[1.0]], {'Z':[[0.5],[0.5]]})
     expdes = ExpDesigns(inp)
     mod = PL()
     expdes.valid_samples = [[0.5]]
-    expdes.valid_model_runs = {'Z': [[0.5]]}
+    expdes.valid_model_runs = {'Z':[[0.5]]}
     mod.Output.names = ['Z']
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
     mean, std = engine._validError()
-    assert mean['Z'][0] < 0.01  # and std['Z'][0] <0.01
-
-
+    assert mean['Z'][0] < 0.01 #and std['Z'][0] <0.01
+    
 #%% Test Engine._BME_Calculator
 
 def test__BME_Calculator() -> None:
@@ -135,22 +133,21 @@ def test__BME_Calculator() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0], [0.5], [1.0]], {'Z': [[0.5], [0.4], [0.5]]})
+    mm.fit([[0.0],[0.5],[1.0]], {'Z':[[0.5],[0.4],[0.5]]})
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
     mod.Output.names = ['Z']
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
-    obs_data = {'Z': np.array([0.45])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    obs_data = {'Z':np.array([0.45])}
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
     engine._BME_Calculator(obs_data, sigma2Dict)
     # Note: if an error appears here, it might also be due to a suboptimal choice of training samples
 
-
 def test__BME_Calculator_rmse() -> None:
     """
     Calculate BME with given RMSE
@@ -158,22 +155,21 @@ def test__BME_Calculator_rmse() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0], [0.5], [1.0]], {'Z': [[0.5], [0.4], [0.5]]})
+    mm.fit([[0.0],[0.5],[1.0]], {'Z':[[0.5],[0.4],[0.5]]})
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
     mod.Output.names = ['Z']
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
-    obs_data = {'Z': np.array([0.45])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine._BME_Calculator(obs_data, sigma2Dict, rmse={'Z': 0.1})
+    obs_data = {'Z':np.array([0.45])}
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    engine._BME_Calculator(obs_data, sigma2Dict, rmse = {'Z':0.1})
     # Note: if an error appears here, it might also be due to a suboptimal choice of training samples
 
-
 def test__BME_Calculator_lik() -> None:
     """
     Calculate BME with given validation likelihood and post-snapshot
@@ -181,23 +177,23 @@ def test__BME_Calculator_lik() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0], [0.5], [1.0]], {'Z': [[0.5], [0.4], [0.5]]})
+    mm.fit([[0.0],[0.5],[1.0]], {'Z':[[0.5],[0.4],[0.5]]})
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
     mod.Output.names = ['Z']
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
-    obs_data = {'Z': np.array([0.45])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    obs_data = {'Z':np.array([0.45])}
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
     expdes.post_snapshot = True
-
+    
     engine.valid_likelihoods = [0.1]
     engine._BME_Calculator(obs_data, sigma2Dict)
-
+    
 
 def test__BME_Calculator_2d() -> None:
     """
@@ -206,28 +202,28 @@ def test__BME_Calculator_2d() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
-    inp.Marginals[1].parameters = [0, 1]
+    inp.Marginals[1].parameters = [0,1]
     mm = MetaModel(inp)
-    mm.fit([[0.0, 0.0], [0.5, 0.1], [1.0, 0.9]], {'Z': [[0.5], [0.4], [0.5]]})
+    mm.fit([[0.0,0.0],[0.5,0.1],[1.0,0.9]], {'Z':[[0.5],[0.4],[0.5]]})
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
     mod.n_obs = 1
     mod.Output.names = ['Z']
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
-    obs_data = {'Z': np.array([0.45])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    obs_data = {'Z':np.array([0.45])}
+    m_observations = obs_data
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
     expdes.post_snapshot = True
-
+    
     engine.valid_likelihoods = [0.1]
     engine._BME_Calculator(obs_data, sigma2Dict)
-
-
+    
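Across the _BME_Calculator tests the inputs follow one convention: the observations are a dict of 1-D arrays keyed by output name, the total variances are wrapped in a pandas DataFrame with matching columns, and an rmse dict can optionally be passed. A minimal sketch of that input preparation only; the engine calls are left commented because they need the full setup shown in the tests above.

    import numpy as np
    import pandas as pd

    obs_data = {'Z': np.array([0.45])}                                 # one observation per output
    sigma2Dict = pd.DataFrame({'Z': np.array([0.05])}, columns=['Z'])  # total variances per output
    rmse = {'Z': 0.1}                                                  # optional, see the rmse test

    # engine._BME_Calculator(obs_data, sigma2Dict)             # plain call
    # engine._BME_Calculator(obs_data, sigma2Dict, rmse=rmse)  # with a given RMSE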
 #%% Test hellinger_distance
 
 def test_hellinger_distance_isnan() -> None:
@@ -236,118 +232,112 @@ def test_hellinger_distance_isnan() -> None:
     """
     P = [0]
     Q = [1]
-    math.isnan(hellinger_distance(P, Q))
-
-
+    math.isnan(hellinger_distance(P,Q))
+    
 def test_hellinger_distance_0() -> None:
     """
     Calculate Hellinger distance-0
     """
-    P = [0, 1, 2]
-    Q = [1, 0, 2]
-    assert hellinger_distance(P, Q) == 0.0
-
-
+    P = [0,1,2]
+    Q = [1,0,2]
+    assert hellinger_distance(P,Q) == 0.0
+    
 def test_hellinger_distance_1() -> None:
     """
     Calculate Hellinger distance-1
     """
-    P = [0, 1, 2]
-    Q = [0, 0, 0]
-    assert hellinger_distance(P, Q) == 1.0
-
-
+    P = [0,1,2]
+    Q = [0,0,0]
+    assert hellinger_distance(P,Q) == 1.0
+    
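For orientation only: the textbook Hellinger distance between two discrete distributions P and Q is

    H(P, Q) = \frac{1}{\sqrt{2}} \sqrt{\sum_i \left(\sqrt{p_i} - \sqrt{q_i}\right)^2},

which is 0 for identical distributions and 1 for distributions with disjoint support. The helper tested above takes raw value lists, and the expected results in these tests do not follow from applying the formula to the lists directly, so the implementation evidently uses its own normalisation or density handling; the formula is background, not a description of hellinger_distance itself.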
 #%% Test Engine._normpdf
-
+   
 def test__normpdf() -> None:
     """
     Likelihoods based on gaussian dist
     """
-
+    
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     mod = PL()
     mod.Output.names = ['Z']
-
-    y_hat_pce = {'Z': np.array([[0.12]])}
-    std_pce = {'Z': np.array([[0.05]])}
-    obs_data = {'Z': np.array([0.1])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    total_sigma2s = pd.DataFrame(sigma2Dict, columns=['Z'])
-
+    
+    y_hat_pce =  {'Z':np.array([[0.12]])}
+    std_pce = {'Z':np.array([[0.05]])}
+    obs_data = {'Z':np.array([0.1])}
+    sigma2Dict = {'Z':np.array([0.05])}
+    total_sigma2s = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
     engine._normpdf(y_hat_pce, std_pce, obs_data, total_sigma2s)
-
-
+      
 def test__normpdf_rmse() -> None:
     """
     Likelihoods based on gaussian dist with rmse
     """
-
+    
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     mod = PL()
     mod.Output.names = ['Z']
-
-    y_hat_pce = {'Z': np.array([[0.12]])}
-    std_pce = {'Z': np.array([[0.05]])}
-    obs_data = {'Z': np.array([0.1])}
-    sigma2Dict = {'Z': np.array([0.05])}
-    total_sigma2s = pd.DataFrame(sigma2Dict, columns=['Z'])
-
+    
+    y_hat_pce =  {'Z':np.array([[0.12]])}
+    std_pce = {'Z':np.array([[0.05]])}
+    obs_data = {'Z':np.array([0.1])}
+    sigma2Dict = {'Z':np.array([0.05])}
+    total_sigma2s = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    
     engine = Engine(mm, mod, expdes)
     engine.start_engine()
-    engine._normpdf(y_hat_pce, std_pce, obs_data, total_sigma2s, rmse={'Z': 0.1})
-
-
+    engine._normpdf(y_hat_pce, std_pce, obs_data, total_sigma2s, rmse = {'Z':0.1})
+    
+    
 #%% Test Engine._posteriorPlot
 
 def test__posteriorPlot() -> None:
     """
     Plot posterior
-    """
+    """    
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
-    posterior = np.array([[0], [0.1], [0.2]])
+    posterior = np.array([[0],[0.1],[0.2]])
     engine = Engine(mm, mod, expdes)
     engine._posteriorPlot(posterior, ['i'], 'Z')
-
-
+    
 def test__posteriorPlot_2d() -> None:
     """
     Plot posterior for 2 params
-    """
+    """    
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
-    inp.Marginals[1].parameters = [0, 1]
+    inp.Marginals[1].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
-    expdes.generate_ED(2, max_pce_deg=1)
+    expdes.generate_ED(2,transform=True,max_pce_deg=1)
     mod = PL()
-    posterior = np.array([[0, 0], [0.1, 1.0], [0.2, 0.5]])
+    posterior = np.array([[0,0],[0.1,1.0],[0.2,0.5]])
     engine = Engine(mm, mod, expdes)
     engine._posteriorPlot(posterior, ['i', 'j'], 'Z')
-
-
+    
 #%% Test logpdf
 
 def test_logpdf() -> None:
@@ -355,8 +345,7 @@ def test_logpdf() -> None:
     Calculate log-pdf
     """
     logpdf(np.array([0.1]), np.array([0.2]), np.array([0.1]))
-
-
+    
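test_logpdf passes three arrays to logpdf; in line with the Gaussian-likelihood helpers used elsewhere in this file, these are presumably the data, the mean and the variance, but that argument order is an assumption rather than something this hunk shows. For comparison only, a self-contained textbook Gaussian log-density, independent of the package:

    import numpy as np

    def gaussian_logpdf(x, mean, sigma2):
        """Sum of elementwise log N(x_i; mean_i, sigma2_i)."""
        x, mean, sigma2 = map(np.asarray, (x, mean, sigma2))
        return float(np.sum(-0.5 * np.log(2.0 * np.pi * sigma2)
                            - (x - mean) ** 2 / (2.0 * sigma2)))

    print(gaussian_logpdf(np.array([0.1]), np.array([0.2]), np.array([0.1])))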
 #%% Test Engine._corr_factor_BME
 # TODO: not used again here?
 
@@ -366,7 +355,7 @@ def test_subdomain() -> None:
     """
     Create subdomains from bounds
     """
-    subdomain([(0, 1), (0, 1)], 2)
+    subdomain([(0,1),(0,1)], 2)
 
 
 #%% Test Engine.tradeoff_weights
@@ -374,94 +363,89 @@ def test_subdomain() -> None:
 def test_tradeoff_weights_None() -> None:
     """
     Tradeoff weights with no scheme
-    """
+    """  
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     mod = PL()
     engine = Engine(mm, mod, expdes)
-    weights = engine.tradeoff_weights(None, [[0], [1]], {'Z': [[0.4], [0.5]]})
+    weights = engine.tradeoff_weights(None, [[0],[1]], {'Z':[[0.4],[0.5]]})
     assert weights[0] == 0 and weights[1] == 1
-
-
+    
 def test_tradeoff_weights_equal() -> None:
     """
     Tradeoff weights with 'equal' scheme
-    """
+    """  
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     mod = PL()
     engine = Engine(mm, mod, expdes)
-    weights = engine.tradeoff_weights('equal', [[0], [1]], {'Z': [[0.4], [0.5]]})
+    weights = engine.tradeoff_weights('equal', [[0],[1]], {'Z':[[0.4],[0.5]]})
     assert weights[0] == 0.5 and weights[1] == 0.5
-
-
+    
 def test_tradeoff_weights_epsdecr() -> None:
     """
     Tradeoff weights with 'epsilon-decreasing' scheme
-    """
+    """  
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 3
-    expdes.X = np.array([[0], [1]])
+    expdes.X = np.array([[0],[1]])
     mod = PL()
     engine = Engine(mm, mod, expdes)
-    weights = engine.tradeoff_weights('epsilon-decreasing', expdes.X, {'Z': [[0.4], [0.5]]})
+    weights = engine.tradeoff_weights('epsilon-decreasing', expdes.X, {'Z':[[0.4],[0.5]]})
     assert weights[0] == 1.0 and weights[1] == 0.0
-
-
+    
 def test_tradeoff_weights_adaptive() -> None:
     """
     Tradeoff weights with 'adaptive' scheme
-    """
+    """  
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     mm = MetaModel(inp)
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 3
-    expdes.X = np.array([[0], [1]])
+    expdes.X = np.array([[0],[1]])
     mod = PL()
     engine = Engine(mm, mod, expdes)
-    weights = engine.tradeoff_weights('adaptive', expdes.X, {'Z': [[0.4], [0.5]]})
+    weights = engine.tradeoff_weights('adaptive', expdes.X, {'Z':[[0.4],[0.5]]})
     assert weights[0] == 0.5 and weights[1] == 0.5
-
-
+    
 def test_tradeoff_weights_adaptiveit1() -> None:
     """
     Tradeoff weights with 'adaptive' scheme for later iteration (not the first)
-    """
+    """  
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
     mod = PL()
     engine = Engine(mm, mod, expdes)
-    engine._y_hat_prev, _ = mm.eval_metamodel(samples=np.array([[0.1], [0.2], [0.6]]))
+    engine._y_hat_prev, _ = mm.eval_metamodel(samples=np.array([[0.1],[0.2],[0.6]]))
     engine.tradeoff_weights('adaptive', expdes.X, expdes.Y)
-
-
+    
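The tradeoff_weights tests above pin down the exploration/exploitation split returned for each scheme. Here is a sketch that reproduces those calls in one script, mirroring the test setup (MetaModel import path assumed); the expected pairs from the assertions are noted in the comments.

    import numpy as np
    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel  # assumed import path
    from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
    from bayesvalidrox.surrogate_models.engine import Engine

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    expdes = ExpDesigns(inp)
    expdes.n_init_samples = 2
    expdes.n_max_samples = 3
    expdes.X = np.array([[0], [1]])
    Y = {'Z': [[0.4], [0.5]]}

    engine = Engine(MetaModel(inp), PL(), expdes)
    print(engine.tradeoff_weights(None, expdes.X, Y))                  # (0, 1): pure exploitation
    print(engine.tradeoff_weights('equal', expdes.X, Y))               # (0.5, 0.5)
    print(engine.tradeoff_weights('epsilon-decreasing', expdes.X, Y))  # (1.0, 0.0) at this stage
    print(engine.tradeoff_weights('adaptive', expdes.X, Y))            # (0.5, 0.5) on the first iteration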
 #%% Test Engine.choose_next_sample
 
 def test_choose_next_sample() -> None:
@@ -471,26 +455,25 @@ def test_choose_next_sample() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'random'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='random'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
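Every choose_next_sample test in this section repeats the same preparation: set the exploration and exploitation options on the ExpDesigns, fit the MetaModel, regenerate the design with transform=True, and hand everything to an Engine with out_names set. A sketch of that shared skeleton, mirroring test_choose_next_sample directly above; the MetaModel import path is assumed.

    import numpy as np
    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.exp_designs import ExpDesigns
    from bayesvalidrox.surrogate_models.surrogate_models import MetaModel  # assumed import path
    from bayesvalidrox.pylink.pylink import PyLinkForwardModel as PL
    from bayesvalidrox.surrogate_models.engine import Engine

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    expdes = ExpDesigns(inp)
    expdes.n_init_samples = 2
    expdes.n_max_samples = 4
    expdes.X = np.array([[0], [1], [0.5]])
    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
    expdes.explore_method = 'random'   # the tests also use 'dual-annealing', 'LOO-CV', 'voronoi', 'latin-hypercube'
    expdes.exploit_method = 'Space-filling'
    expdes.util_func = 'Space-filling'

    mm = MetaModel(inp)
    mm.fit(expdes.X, expdes.Y)
    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))

    engine = Engine(mm, PL(), expdes)
    engine.out_names = ['Z']
    x, _ = engine.choose_next_sample()  # one new 1-D candidate, x.shape == (1, 1)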
 def test_choose_next_sample_da_spaceparallel() -> None:
     """
     Chooses new sample using dual-annealing and space-filling, parallel=True
@@ -498,27 +481,26 @@ def test_choose_next_sample_da_spaceparallel() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'dual-annealing'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='dual-annealing'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     engine.parallel = True
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+       
 def test_choose_next_sample_da_spacenoparallel() -> None:
     """
     Chooses new sample using dual-annealing and space-filling, parallel = False
@@ -526,27 +508,26 @@ def test_choose_next_sample_da_spacenoparallel() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'dual-annealing'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='dual-annealing'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     engine.parallel = False
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
 def test_choose_next_sample_loo_space() -> None:
     """
     Chooses new sample using all LOO-CV and space-filling
@@ -554,26 +535,25 @@ def test_choose_next_sample_loo_space() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'LOO-CV'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='LOO-CV'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
 def test_choose_next_sample_vor_space() -> None:
     """
     Chooses new sample using voronoi, space-filling
@@ -581,26 +561,25 @@ def test_choose_next_sample_vor_space() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'voronoi'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='voronoi'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
 def test_choose_next_sample_latin_space() -> None:
     """
     Chooses new sample using all latin-hypercube, space-filling
@@ -608,26 +587,25 @@ def test_choose_next_sample_latin_space() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'Space-filling'
-    expdes.util_func = 'Space-filling'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='Space-filling'
+    expdes.util_func='Space-filling'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
     x, nan = engine.choose_next_sample()
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    assert x.shape[0]==1 and x.shape[1] == 1
+     
 def test_choose_next_sample_latin_alphD() -> None:
     """
     Chooses new sample using all latin-hypercube, alphabetic (D)
@@ -635,26 +613,25 @@ def test_choose_next_sample_latin_alphD() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'alphabetic'
-    expdes.util_func = 'D-Opt'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='alphabetic'
+    expdes.util_func='D-Opt'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
+     
 def test_choose_next_sample_latin_alphK() -> None:
     """
     Chooses new sample using all latin-hypercube, alphabetic (K)
@@ -662,26 +639,25 @@ def test_choose_next_sample_latin_alphK() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'alphabetic'
-    expdes.util_func = 'K-Opt'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='alphabetic'
+    expdes.util_func='K-Opt'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
 def test_choose_next_sample_latin_alphA() -> None:
     """
     Chooses new sample using all latin-hypercube, alphabetic (A)
@@ -689,26 +665,25 @@ def test_choose_next_sample_latin_alphA() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'alphabetic'
-    expdes.util_func = 'A-Opt'
-
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='alphabetic'
+    expdes.util_func='A-Opt'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
+     
 def test_choose_next_sample_latin_VarALM() -> None:
     """
     Chooses new sample using all latin-hypercube, VarDesign (ALM)
@@ -716,27 +691,26 @@ def test_choose_next_sample_latin_VarALM() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'VarOptDesign'
-    expdes.util_func = 'ALM'
-
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='VarOptDesign'
+    expdes.util_func='ALM'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
+     
 def test_choose_next_sample_latin_VarEIGF() -> None:
     """
     Chooses new sample using all latin-hypercube, VarDesign (EIGF)
@@ -744,26 +718,25 @@ def test_choose_next_sample_latin_VarEIGF() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'VarOptDesign'
-    expdes.util_func = 'EIGF'
-
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='VarOptDesign'
+    expdes.util_func='EIGF'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
 
 def test_choose_next_sample_latin_VarLOO() -> None:
     """
@@ -772,27 +745,26 @@ def test_choose_next_sample_latin_VarLOO() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'VarOptDesign'
-    expdes.util_func = 'LOOCV'
-
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='VarOptDesign'
+    expdes.util_func='LOOCV'
+    
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    x, nan = engine.choose_next_sample(var=expdes.util_func)
-    assert x.shape[0] == 1 and x.shape[1] == 1
-
-
+    x, nan = engine.choose_next_sample(var = expdes.util_func)
+    assert x.shape[0]==1 and x.shape[1] == 1
+    
 def test_choose_next_sample_latin_BODMI() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (MI)
@@ -800,29 +772,28 @@ def test_choose_next_sample_latin_BODMI() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'MI'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='MI'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
+      
 def test_choose_next_sample_latin_BODALC() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (ALC)
@@ -830,29 +801,28 @@ def test_choose_next_sample_latin_BODALC() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'ALC'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='ALC'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
+   
 def test_choose_next_sample_latin_BODDKL() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (DKL)
@@ -860,29 +830,29 @@ def test_choose_next_sample_latin_BODDKL() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'DKL'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='DKL'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
 
+   
 def test_choose_next_sample_latin_BODDPP() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (DPP)
@@ -890,29 +860,29 @@ def test_choose_next_sample_latin_BODDPP() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'DPP'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='DPP'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
 
+   
 def test_choose_next_sample_latin_BODAPP() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (APP)
@@ -920,59 +890,59 @@ def test_choose_next_sample_latin_BODAPP() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'APP'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='APP'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
 
-def test_choose_next_sample_latin_BODMI_() -> None:
+   
+def test_choose_next_sample_latin_BODMI() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesOptDesign (MI)
     """
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesOptDesign'
-    expdes.util_func = 'MI'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesOptDesign'
+    expdes.util_func='MI'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
 
+   
 def test_choose_next_sample_latin_BADBME() -> None:
     """
     Chooses new sample using all latin-hypercube, BayesActDesign (BME)
@@ -980,29 +950,28 @@ def test_choose_next_sample_latin_BADBME() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesActDesign'
-    expdes.util_func = 'BME'
+    expdes.explore_method='latin-hypercube'
+    expdes.exploit_method='BayesActDesign'
+    expdes.util_func='BME'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    engine.observations = {'Z':np.array([0.45])}
+    #engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z':np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns = ['Z'])
     engine.n_obs = 1
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
-
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var = expdes.util_func)
 
 def test_choose_next_sample_latin_BADDKL() -> None:
     """
@@ -1011,28 +980,28 @@ def test_choose_next_sample_latin_BADDKL() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0,1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0],[1],[0.5]])
+    expdes.Y = {'Z':[[0.4],[0.5],[0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesActDesign'
-    expdes.util_func = 'DKL'
+    expdes.explore_method = 'latin-hypercube'
+    expdes.exploit_method = 'BayesActDesign'
+    expdes.util_func = 'DKL'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    engine.observations = {'Z': np.array([0.45])}
+    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z': np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
     engine.n_obs = 1
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
 
 
 def test_choose_next_sample_latin_BADinfEntropy() -> None:
@@ -1042,25 +1011,52 @@ def test_choose_next_sample_latin_BADinfEntropy() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
-    inp.Marginals[0].parameters = [0, 1]
+    inp.Marginals[0].parameters = [0, 1]
     expdes = ExpDesigns(inp)
     expdes.n_init_samples = 2
     expdes.n_max_samples = 4
-    expdes.X = np.array([[0], [1], [0.5]])
-    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.X = np.array([[0], [1], [0.5]])
+    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
     expdes.tradeoff_scheme = 'equal'
-    expdes.explore_method = 'latin-hypercube'
-    expdes.exploit_method = 'BayesActDesign'
-    expdes.util_func = 'infEntropy'
+    expdes.explore_method = 'latin-hypercube'
+    expdes.exploit_method = 'BayesActDesign'
+    expdes.util_func = 'infEntropy'
     mm = MetaModel(inp)
     mm.fit(expdes.X, expdes.Y)
-    expdes.generate_ED(expdes.n_init_samples, max_pce_deg=np.max(mm.pce_deg))
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
     mod = PL()
     engine = Engine(mm, mod, expdes)
     engine.out_names = ['Z']
-    engine.observations = {'Z': np.array([0.45])}
-    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
-    sigma2Dict = {'Z': np.array([0.05])}
-    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
+    engine.observations = {'Z': np.array([0.45])}
+    # engine.choose_next_sample(sigma2=None, n_candidates=5, var='DKL')
+    sigma2Dict = {'Z': np.array([0.05])}
+    sigma2Dict = pd.DataFrame(sigma2Dict, columns=['Z'])
     engine.n_obs = 1
-    engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
+    x, nan = engine.choose_next_sample(sigma2=sigma2Dict, var=expdes.util_func)
+
+
+if __name__ == '__main__':
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    expdes = ExpDesigns(inp)
+    expdes.n_init_samples = 2
+    expdes.n_max_samples = 4
+    expdes.X = np.array([[0], [1], [0.5]])
+    expdes.Y = {'Z': [[0.4], [0.5], [0.45]]}
+    expdes.explore_method = 'dual-annealing'
+    expdes.exploit_method = 'Space-filling'
+    expdes.util_func = 'Space-filling'
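+    # With both exploit_method and util_func set to 'Space-filling', no
+    # observations or sigma2 are needed, which is why choose_next_sample is
+    # called without arguments below (inferred from this block, not the docs).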
+
+    mm = MetaModel(inp)
+    mm.fit(expdes.X, expdes.Y)
+    expdes.generate_ED(expdes.n_init_samples, transform=True, max_pce_deg=np.max(mm.pce_deg))
+    mod = PL()
+    engine = Engine(mm, mod, expdes)
+    engine.out_names = ['Z']
+    engine.parallel = True
+    x, nan = engine.choose_next_sample()
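+    # Expect one proposed sample with a single column, matching the single
+    # marginal defined above.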
+    assert x.shape[0] == 1 and x.shape[1] == 1
-- 
GitLab