From 223c099cfc2051a2776938c3b29c1bb3e5cb48f1 Mon Sep 17 00:00:00 2001
From: kohlhaasrebecca <rebecca.kohlhaas@outlook.com>
Date: Fri, 23 Aug 2024 14:03:52 +0200
Subject: [PATCH] [fix] Changed InputSpace.apce to InputSpace.pce

---
 .coverage                             | Bin 0 -> 53248 bytes
 .../posterior/Z.npy                   | Bin 0 -> 176 bytes
 src/bayesvalidrox/.coverage           | Bin 0 -> 53248 bytes
 .../surrogate_models/input_space.py   |  15 +-
 tests/test_InputSpace.py              |   2 +-
 tests/test_MetaModel.py               | 927 +----------------
 tests/test_PolynomialChaosEmulator.py | 952 ++++++++++++++++++
 tests/test_supplementary.py           | 162 +++
 8 files changed, 1140 insertions(+), 918 deletions(-)
 create mode 100644 .coverage
 create mode 100644 Outputs_SeqPosteriorComparison/posterior/Z.npy
 create mode 100644 src/bayesvalidrox/.coverage
 create mode 100644 tests/test_PolynomialChaosEmulator.py
 create mode 100644 tests/test_supplementary.py
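A minimal usage sketch (not part of the patch) of the renamed flag, reusing the Input/InputSpace setup from tests/test_InputSpace.py; names and the call pattern are taken from this diff and the existing tests, nothing else is assumed:

    import sys
    sys.path.append("src/")

    from bayesvalidrox.surrogate_models.inputs import Input
    from bayesvalidrox.surrogate_models.input_space import InputSpace

    inp = Input()
    inp.add_marginals()
    inp.Marginals[0].dist_type = 'normal'
    inp.Marginals[0].parameters = [0, 1]

    # The flag is now InputSpace.pce (formerly InputSpace.apce). It is
    # initialised to None and set to True only when
    # meta_model_type.lower() == 'pce'; every other type (e.g. 'gpe', 'apce')
    # yields pce = False, which routes polynomial construction to the
    # 'arbitrary' polytype via `not self.pce` in the hunks below.
    space = InputSpace(inp, meta_model_type='pce')
    print(space.pce)  # True once the PCE check shown in this patch has run
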
diff --git a/.coverage b/.coverage
new file mode 100644
index 0000000000000000000000000000000000000000..2dac62dc3862970f94a717175f95a6889684b93f
Binary files /dev/null and b/.coverage differ
diff --git a/Outputs_SeqPosteriorComparison/posterior/Z.npy b/Outputs_SeqPosteriorComparison/posterior/Z.npy
new file mode 100644
index 0000000000000000000000000000000000000000..8d89efa6714257ec2d867aa5eb95b7f23b915010
Binary files /dev/null and b/Outputs_SeqPosteriorComparison/posterior/Z.npy differ
diff --git a/src/bayesvalidrox/.coverage b/src/bayesvalidrox/.coverage
new file mode 100644
index 0000000000000000000000000000000000000000..a9949086ff22941448ad6b80430111e3dd99e24f
Binary files /dev/null and b/src/bayesvalidrox/.coverage differ
z@E*m1n7Dr0Oh0mCH}HkJR8A7B(VY$KFj~G5-ePO0tmoXI=Q?M0uLYeV^Bp;}<XPr1 zRn8A`)*>U{nlh(#_7iVN<S!C$%AG?w^o`?SRnAp;;~;D7TldW!eOa`3sXKFW?opj9 z@!hjT%{#v%ikzLa8Gg_a+z5(*4>zjzC)=h$QK>GYvk86P8ptFL(&GVj{-959o4<Xq zZrOW#=Ji<`D9ke$x^Xn$sVxP~{Pc>@;p?m8hDXVAknkv)OhhnFhYaTyx^pO+7st$_ z97BdY&_s|Wq?*3dXyeLK9mH@F43}dqjhIN$0HL`cPc}|`hlfhUZE%8f70MIU%dYBa z;avw3g-*X#wU72zh0Z(#2L1VNeYRV*>}Jz^Sxdtx>#gjUwG={mPu?+&=9fGrqa@5H zk~d9~d5&V1%*RM}g__f~s`g=XRdcd*gMPNNKC|vw_Rfwu(<6fCC4SvsY6PDthQN)5 z;O(nFmGP;#U3WfB6>r_I+Mn&LsyJ6+(C7E+s~^|WDqruW`W{XC{HiQfna>ne{zhGO z&*g+>lc{t=8nQG8>lHw7!jYuJau_6W8ozv)h9|E*MSXPv#c^&=soH6B8s6UqQRLFH z%F9;tX?|9;PG!WiP}CB`AeboW>$QP$JdWs|ZY0t=++LMFFUp9nxQvtYR$8Zgap7tH zm<C-wzG%nuq(92mD_(|msWkWV;N<)0NrW<r77faXhUkql^Fp($JY(hM_oigRrFwFt zhdX!5LWvir>Q-@jHTqh{Dt0|3I7wQWG_WpYB%DyuMOsVSt;(CifO~7u=Xd^=%-^1k z*`8_H4<DMdBpZx+o*6{6sCzQ6c6PY76eKq;7V+|wCS3Zovlu6Ax4^-N`VKs~=^h+v zg*;XES*PCSFZgETKZAa-K>z{}fB*y_009U<00Izz00baVK7oq4W?K6D|7)Z1x_k{p ziy#022tWV=5P$##AOHafKmY>osX)W5H0|s!0esv$=pEDv!fgC&&<{2UKmY;|fB*y_ z009U<00Izz00hb;P_Hy=*&hO=|G@u2nHq<-KmY;|fB*y_009U<00Izz00ba#Q-GiU z(|`Jh4FV8=00bZa0SG_<0uX=z1Rwx`5)0t@e~CL6O@jagAOHafKmY;|fB*y_009VO z0(kz9P=EjgAOHafKmY;|fB*y_009V;UjWbl%iqUnAp{@*0SG_<0uX=z1Rwwb2tWYO z{}BTafB*y_009U<00Izz00bZaf$|IB`G5KQ7%hYV1Rwwb2tWV=5P$##AOHaf;Q2pd z00Izz00bZa0SG_<0uX=z1Rzj;0X+XNe;=cT5P$##AOHafKmY;|fB*y_0D=DjZpReE literal 0 HcmV?d00001 diff --git a/src/bayesvalidrox/surrogate_models/input_space.py b/src/bayesvalidrox/surrogate_models/input_space.py index 99b0d5d39..7eedbbe0d 100644 --- a/src/bayesvalidrox/surrogate_models/input_space.py +++ b/src/bayesvalidrox/surrogate_models/input_space.py @@ -30,7 +30,7 @@ class InputSpace: self.meta_model_type = meta_model_type # Other - self.apce = None + self.pce = None self.bound_tuples = None self.input_data_given = None self.JDist = None @@ -61,12 +61,11 @@ class InputSpace: inputs = self.InputObj self.ndim = len(inputs.Marginals) - # Check if PCE or aPCE metamodel is selected. - # TODO: test also for 'pce'?? - if self.meta_model_type.lower() == 'apce': - self.apce = True + # Check if PCE metamodel is selected. + if self.meta_model_type.lower() == 'pce': + self.pce = True else: - self.apce = False + self.pce = False # check if marginals given if not self.ndim >= 1: @@ -293,7 +292,7 @@ class InputSpace: f"{parIdx + 1} is not available.") raise ValueError(message) - if self.input_data_given or self.apce: + if self.input_data_given or not self.pce: polytype = 'arbitrary' # Store dists and poly_types @@ -376,7 +375,7 @@ class InputSpace: disttypes.append(inputs.Marginals[par_i].dist_type) # Pass non-transformed X, if arbitrary PCE is selected. 
-        if None in disttypes or self.input_data_given or self.apce:
+        if None in disttypes or self.input_data_given or not self.pce:
             return X
 
         cdfx = np.zeros(X.shape)
diff --git a/tests/test_InputSpace.py b/tests/test_InputSpace.py
index 40f269563..60893db8f 100644
--- a/tests/test_InputSpace.py
+++ b/tests/test_InputSpace.py
@@ -125,7 +125,7 @@ def test_check_valid_input_noapc() -> None:
     inp.add_marginals()
     inp.Marginals[1].dist_type = 'normal'
     inp.Marginals[1].parameters = [0, 1]
-    InputSpace(inp, meta_Model_type='gpe')
+    InputSpace(inp, meta_model_type='gpe')
 
 
 #%% Test ExpDesign.build_polytypes
diff --git a/tests/test_MetaModel.py b/tests/test_MetaModel.py
index 5cea0dcae..dd1eb29f2 100644
--- a/tests/test_MetaModel.py
+++ b/tests/test_MetaModel.py
@@ -33,8 +33,8 @@ sys.path.append("src/")
 
 from bayesvalidrox.surrogate_models.inputs import Input
 from bayesvalidrox.surrogate_models.input_space import InputSpace
-from bayesvalidrox.surrogate_models.surrogate_models import MetaModel, corr_loocv_error, create_psi
-from bayesvalidrox.surrogate_models.surrogate_models import gaussian_process_emulator
+from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
+from bayesvalidrox.surrogate_models.supplementary import create_psi
 
 
 
@@ -51,137 +51,31 @@ def test_metamod() -> None:
     MetaModel(inp)
 
 
-#%% Test MetaModel.build_metamodel
+#%% Test MetaModel.check_is_gaussian
 
-def test_build_metamodel_nosamples() -> None:
-    """
-    Build MetaModel without collocation samples
-    """
+def test_check_is_gaussian() -> None:
     inp = Input()
     inp.add_marginals()
     inp.Marginals[0].dist_type = 'normal'
     inp.Marginals[0].parameters = [0, 1]
     mm = MetaModel(inp)
-    with pytest.raises(AttributeError) as excinfo:
-        mm.build_metamodel()
-    assert str(excinfo.value) == 'Please provide samples to the metamodel before building it.'
- - -def test_build_metamodel() -> None: - """ - Build MetaModel - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.CollocationPoints = np.array([[0.2], [0.8]]) - mm.build_metamodel() - + mm.check_is_gaussian() -def test_build_metamodel_ninitsamples() -> None: - """ - Build MetaModel with n_init_samples - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.CollocationPoints = np.array([[0.2], [0.8]]) - mm.build_metamodel(n_init_samples=2) +#%% Test MetaModel.build_metamodel -def test_build_metamodel_gpe() -> None: +def test_build_metamodel() -> None: """ - Build MetaModel gpe + Build MetaModel """ inp = Input() inp.add_marginals() inp.Marginals[0].dist_type = 'normal' inp.Marginals[0].parameters = [0, 1] mm = MetaModel(inp) - mm.meta_model_type = 'gpe' - mm.CollocationPoints = np.array([[0.2], [0.8]]) mm.build_metamodel() - - -def test_build_metamodel_coldimerr() -> None: - """ - Build MetaModel with wrong shape collocation samples - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.CollocationPoints = [[0.2, 0.8]] - with pytest.raises(AttributeError) as excinfo: - mm.build_metamodel() - assert str( - excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj' - - -#%% Test MetaMod.generate_polynomials - -def test_generate_polynomials_noexp() -> None: - """ - Generate polynomials without ExpDeg - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - with pytest.raises(AttributeError) as excinfo: - mm.generate_polynomials() - assert str(excinfo.value) == 'Generate or add InputSpace before generating polynomials' - - -def test_generate_polynomials_nodeg() -> None: - """ - Generate polynomials without max_deg - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - - # Setup - mm.InputSpace = InputSpace(inp) - mm.InputSpace.n_init_samples = 2 - mm.InputSpace.init_param_space(np.max(mm.pce_deg)) - mm.ndim = mm.InputSpace.ndim - mm.n_params = len(mm.input_obj.Marginals) - - # Generate - with pytest.raises(AttributeError) as excinfo: - mm.generate_polynomials() - assert str(excinfo.value) == 'MetaModel cannot generate polynomials in the given scenario!' 
- - -def test_generate_polynomials_deg() -> None: - """ - Generate polynomials with max_deg - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - - # Setup - mm.InputSpace = InputSpace(inp) - mm.InputSpace.n_init_samples = 2 - mm.InputSpace.init_param_space(np.max(mm.pce_deg)) - mm.ndim = mm.InputSpace.ndim - mm.n_params = len(mm.input_obj.Marginals) - - # Generate - mm.generate_polynomials(4) - + + #%% Test MetaMod.add_InputSpace @@ -261,656 +155,6 @@ def test_fit_gpe() -> None: mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) -#%% Test MetaModel.create_psi - -def test_create_psi() -> None: - """ - Create psi-matrix - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2], [0.8]]) - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - create_psi(BasisIndices, univ_bas) - - -#%% Test MetaModel.regression - -def test_regression() -> None: - """ - Regression without a method - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi) - - -def test_regression_ols() -> None: - """ - Regression: ols - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ols') - - -def test_regression_olssparse() -> None: - """ - Regression: ols and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ols', sparsity=True) - - -def test_regression_ard() -> None: - """ - Regression: ard - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2], [0.8]]) - outputs = np.array([0.4, 0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ard') - - -def test_regression_ardssparse() -> None: - """ - Regression: ard and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = 
MetaModel(inp) - samples = np.array([[0.2], [0.8]]) - outputs = np.array([0.4, 0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ard', sparsity=True) - - -def test_regression_fastard() -> None: - """ - Regression: fastard - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='fastard') - - -def test_regression_fastardssparse() -> None: - """ - Regression: fastard and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='fastard', sparsity=True) - - -def test_regression_brr() -> None: - """ - Regression: brr - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='brr') - - -def test_regression_brrssparse() -> None: - """ - Regression: brr and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='brr', sparsity=True) - - -if 0: # Could not figure out these errors, issue most likely in chosen samples/outputs - def test_regression_bcs() -> None: - """ - Regression: bcs - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]) - mm.pce_deg = 3 - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(mm.pce_deg)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='bcs') - - - def test_regression_bcsssparse() -> None: - """ - Regression: bcs and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - 
inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='bcs', sparsity=True) - - -def test_regression_lars() -> None: - """ - Regression: lars - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='lars') - - -def test_regression_larsssparse() -> None: - """ - Regression: lars and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='lars', sparsity=True) - - -def test_regression_sgdr() -> None: - """ - Regression: sgdr - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='sgdr') - - -def test_regression_sgdrssparse() -> None: - """ - Regression: sgdr and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='sgdr', sparsity=True) - - -if 0: # Could not figure out these errors, issue most likely in chosen samples/outputs - def test_regression_omp() -> None: - """ - Regression: omp - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - 
BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='omp') - - - def test_regression_ompssparse() -> None: - """ - Regression: omp and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='omp', sparsity=True) - - -def test_regression_vbl() -> None: - """ - Regression: vbl - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='vbl') - - -def test_regression_vblssparse() -> None: - """ - Regression: vbl and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='vbl', sparsity=True) - - -def test_regression_ebl() -> None: - """ - Regression: ebl - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ebl') - - -def test_regression_eblssparse() -> None: - """ - Regression: ebl and sparse - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.5]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - mm.regression(samples, outputs, psi, reg_method='ebl', sparsity=True) - - -#%% Test Model.update_pce_coeffs - -# TODO: very linked to the actual training... 
- -#%% Test MetaModel.univ_basis_vals - -def test_univ_basis_vals() -> None: - """ - Creates univariate polynomials - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2], [0.8]]) - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - mm.univ_basis_vals(samples) - - -#%% Test MetaModel.adaptive_regression - -def test_adaptive_regression_fewsamples() -> None: - """ - Adaptive regression, no specific method, too few samples given - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.8]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - - # Evaluate the univariate polynomials on InputSpace - if mm.meta_model_type.lower() != 'gpe': - mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints) - - with pytest.raises(AttributeError) as excinfo: - mm.adaptive_regression(outputs, 0) - assert str(excinfo.value) == ('There are too few samples for the corrected loo-cv error. Fit surrogate on at least as ' - 'many samples as parameters to use this') - - -def test_adaptive_regression() -> None: - """ - Adaptive regression, no specific method - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1]]) - outputs = np.array([0.0, 0.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - - # Evaluate the univariate polynomials on InputSpace - if mm.meta_model_type.lower() != 'gpe': - mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints) - mm.adaptive_regression(outputs, 0) - - -def test_adaptive_regression_verbose() -> None: - """ - Adaptive regression, no specific method, verbose output - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1]]) - outputs = np.array([0.0, 0.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - - # Evaluate the univariate polynomials on InputSpace - if mm.meta_model_type.lower() != 'gpe': - mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints) - mm.adaptive_regression(outputs, 0, True) - - -def test_adaptive_regression_ols() -> None: - """ - Adaptive regression, ols - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], - [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - - # Evaluate the univariate polynomials on InputSpace - if mm.meta_model_type.lower() != 'gpe': - mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints) - mm.pce_reg_method = 'ols' - mm.adaptive_regression(outputs, 0) - - -#%% Test MetaModel.corr_loocv_error - -def test_corr_loocv_error_nosparse() -> None: - """ - Corrected loocv error - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], - [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 
0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - outs = mm.regression(samples, outputs, psi, reg_method='ebl') - corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'], - outputs) - - -def test_corr_loocv_error_singley() -> None: - """ - Corrected loocv error - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.2]]) - outputs = np.array([0.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - outs = mm.regression(samples, outputs, psi, reg_method='ols') - corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'], - outputs) - - -def test_corr_loocv_error_sparse() -> None: - """ - Corrected loocv error from sparse results - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], - [0.8], [0.9], [1.0]]) - outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) - - mm.CollocationPoints = samples - mm.build_metamodel(n_init_samples=2) - BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] - univ_bas = mm.univ_basis_vals(samples) - psi = create_psi(BasisIndices, univ_bas) - - outs = mm.regression(samples, outputs, psi, reg_method='ebl', - sparsity=True) - corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'], - outputs) - #%% Test MetaModel.pca_transformation @@ -924,7 +168,7 @@ def test_pca_transformation() -> None: inp.Marginals[0].parameters = [0, 1] mm = MetaModel(inp) outputs = np.array([[0.4, 0.4], [0.5, 0.6]]) - mm.pca_transformation(outputs) + mm.pca_transformation(outputs, 1) def test_pca_transformation_varcomp() -> None: @@ -938,7 +182,7 @@ def test_pca_transformation_varcomp() -> None: mm = MetaModel(inp) outputs = np.array([[0.4, 0.4], [0.5, 0.6]]) mm.var_pca_threshold = 1 - mm.pca_transformation(outputs) + mm.pca_transformation(outputs, 1) def test_pca_transformation_ncomp() -> None: @@ -950,55 +194,8 @@ def test_pca_transformation_ncomp() -> None: inp.Marginals[0].dist_type = 'normal' inp.Marginals[0].parameters = [0, 1] mm = MetaModel(inp) - outputs = np.array([[0.4, 0.4], [0.5, 0.6]]) - mm.n_pca_components = 1 - mm.pca_transformation(outputs) - - -#%% Test MetaModel.gaussian_process_emulator - -def test_gaussian_process_emulator() -> None: - """ - Create GPE - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5]) - - -def test_gaussian_process_emulator_nug() -> None: - """ - Create GPEwith nugget - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], nug_term=1.0) - - -def test_gaussian_process_emulator_autosel() -> None: - """ - Fit MetaModel with autoselect - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], 
autoSelect=True) - - -def test_gaussian_process_emulator_varidx() -> None: - """ - Create GPE with var_idx - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], varIdx=1) + outputs = np.array([[0.4, 0.4], [0.5, 0.6]]) + mm.pca_transformation(outputs, 1) #%% Test MetaModel.eval_metamodel @@ -1012,70 +209,10 @@ def test_eval_metamodel() -> None: inp.Marginals[0].dist_type = 'normal' inp.Marginals[0].parameters = [0, 1] mm = MetaModel(inp) - mm.out_names = ['Z'] - mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) - mm.eval_metamodel([[0.4]]) - - -def test_eval_metamodel_normalboots() -> None: - """ - Eval trained MetaModel with normal bootstrap - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.bootstrap_method = 'normal' - mm.out_names = ['Z'] - mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) - mm.eval_metamodel([[0.4]]) - - -def test_eval_metamodel_highnormalboots() -> None: - """ - Eval trained MetaModel with higher bootstrap-itrs - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.n_bootstrap_itrs = 2 - mm.out_names = ['Z'] - mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) - mm.eval_metamodel([[0.4]]) - - -def test_eval_metamodel_gpe() -> None: - """ - Eval trained MetaModel - gpe - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.meta_model_type = 'gpe' - mm.out_names = ['Z'] - mm.fit([[0.2], [0.8]], {'Z': np.array([[0.4], [0.5]])}) - mm.eval_metamodel([[0.4]]) + out = mm.eval_metamodel([[0.4]]) + assert len(out) == 2 -def test_eval_metamodel_pca() -> None: - """ - Eval trained MetaModel with pca - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.dim_red_method = 'pca' - mm.out_names = ['Z'] - mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]}) - mm.eval_metamodel([[0.4]]) - #%% Test MetaModel.create_model_error # TODO: move model out of this function @@ -1115,7 +252,7 @@ def test_copy_meta_model_opts() -> None: #%% Test Engine._compute_pce_moments -def test__compute_pce_moments() -> None: +def test__compute_moments() -> None: """ Compute moments of a pce-surrogate """ @@ -1125,37 +262,9 @@ def test__compute_pce_moments() -> None: inp.Marginals[0].parameters = [0, 1] mm = MetaModel(inp) mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) - mm._compute_pce_moments() + mm._compute_moments() -def test__compute_pce_moments_pca() -> None: - """ - Compute moments of a pce-surrogate with pca - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.dim_red_method = 'pca' - mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]}) - mm._compute_pce_moments() - - -def test__compute_pce_moments_gpe() -> None: - """ - Compute moments of a gpe-surrogate - """ - inp = Input() - inp.add_marginals() - inp.Marginals[0].dist_type = 'normal' - inp.Marginals[0].parameters = [0, 1] - mm = MetaModel(inp) - mm.meta_model_type = 'gpe' - with pytest.raises(AttributeError) as excinfo: - mm._compute_pce_moments() - assert str(excinfo.value) == 'Moments can only be 
computed for pce-type surrogates' - #%% Test MetaModel.update_metamodel # TODO: taken from engine diff --git a/tests/test_PolynomialChaosEmulator.py b/tests/test_PolynomialChaosEmulator.py new file mode 100644 index 000000000..c15f9e139 --- /dev/null +++ b/tests/test_PolynomialChaosEmulator.py @@ -0,0 +1,952 @@ +# -*- coding: utf-8 -*- +""" +Test the MetaModel class in bayesvalidrox. +Tests are available for the following functions +Class MetaModel: + build_metamodel - x + update_metamodel + update_pce_coeffs + create_basis_indices --removed, just redirects + add_InputSpace -x + univ_basis_vals + fit + adaptive_regression + pca_transformation + eval_metamodel + create_model_error + eval_model_error + auto_vivification + copy_meta_model_opts + __select_degree + generate_polynomials + _compute_pce_moments + +""" +import numpy as np +import pytest +import sys + +sys.path.append("src/") + +from bayesvalidrox.surrogate_models.inputs import Input +from bayesvalidrox.surrogate_models.input_space import InputSpace +from bayesvalidrox.surrogate_models.surrogate_models import MetaModel +from bayesvalidrox.surrogate_models.polynomial_chaos import PCE +from bayesvalidrox.surrogate_models.supplementary import gaussian_process_emulator, corr_loocv_error, create_psi + + + +#%% Test MetaMod constructor on its own + +def test_metamod() -> None: + """ + Construct PCE without inputs + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + PCE(inp) + + +#%% Test PCE.build_metamodel + +def test_build_metamodel_nosamples() -> None: + """ + Build PCE without collocation samples + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + with pytest.raises(AttributeError) as excinfo: + mm.build_metamodel() + assert str(excinfo.value) == 'Please provide samples to the metamodel before building it.' 
+ + +def test_build_metamodel() -> None: + """ + Build PCE + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.CollocationPoints = np.array([[0.2], [0.8]]) + mm.build_metamodel() + + +def test_build_metamodel_ninitsamples() -> None: + """ + Build PCE with n_init_samples + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.CollocationPoints = np.array([[0.2], [0.8]]) + mm.build_metamodel(n_init_samples=2) + + + +def test_build_metamodel_coldimerr() -> None: + """ + Build PCE with wrong shape collocation samples + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.CollocationPoints = [[0.2, 0.8]] + with pytest.raises(AttributeError) as excinfo: + mm.build_metamodel() + assert str( + excinfo.value) == 'The second dimension of X should be the same size as the number of marginals in the InputObj' + + +#%% Test MetaMod.generate_polynomials + +def test_generate_polynomials_noexp() -> None: + """ + Generate polynomials without ExpDeg + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + with pytest.raises(AttributeError) as excinfo: + mm.generate_polynomials() + assert str(excinfo.value) == 'Generate or add InputSpace before generating polynomials' + + +def test_generate_polynomials_nodeg() -> None: + """ + Generate polynomials without max_deg + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + + # Setup + mm.InputSpace = InputSpace(inp) + mm.InputSpace.n_init_samples = 2 + mm.InputSpace.init_param_space(np.max(mm.pce_deg)) + mm.ndim = mm.InputSpace.ndim + mm.n_params = len(mm.input_obj.Marginals) + + # Generate + with pytest.raises(AttributeError) as excinfo: + mm.generate_polynomials() + assert str(excinfo.value) == 'PCE cannot generate polynomials in the given scenario!' 
+ + +def test_generate_polynomials_deg() -> None: + """ + Generate polynomials with max_deg + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + + # Setup + mm.InputSpace = InputSpace(inp) + mm.InputSpace.n_init_samples = 2 + mm.InputSpace.init_param_space(np.max(mm.pce_deg)) + mm.ndim = mm.InputSpace.ndim + mm.n_params = len(mm.input_obj.Marginals) + + # Generate + mm.generate_polynomials(4) + + +#%% Test MetaMod.add_InputSpace + +def test_add_inputspace() -> None: + """ + Add InputSpace in PCE + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.add_InputSpace() + + +#%% Test PCE.fit +# Faster without these +def test_fit() -> None: + """ + Fit PCE + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}) + + +def test_fit_parallel() -> None: + """ + Fit PCE in parallel + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}, parallel=True) + + +def test_fit_verbose() -> None: + """ + Fit PCE verbose + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}, verbose=True) + + +def test_fit_pca() -> None: + """ + Fit PCE verbose and with pca + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + mm.dim_red_method = 'pca' + mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]}, verbose=True) + + + +#%% Test PCE.regression + +def test_regression() -> None: + """ + Regression without a method + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi) + + +def test_regression_ols() -> None: + """ + Regression: ols + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='ols') + + +def test_regression_olssparse() -> None: + """ + Regression: ols and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='ols', 
sparsity=True) + + +def test_regression_ard() -> None: + """ + Regression: ard + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2], [0.8]]) + outputs = np.array([0.4, 0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='ard') + + +def test_regression_ardssparse() -> None: + """ + Regression: ard and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2], [0.8]]) + outputs = np.array([0.4, 0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='ard', sparsity=True) + + +def test_regression_fastard() -> None: + """ + Regression: fastard + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='fastard') + + +def test_regression_fastardssparse() -> None: + """ + Regression: fastard and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='fastard', sparsity=True) + + +def test_regression_brr() -> None: + """ + Regression: brr + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='brr') + + +def test_regression_brrssparse() -> None: + """ + Regression: brr and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='brr', sparsity=True) + + +if 0: # Could not figure out these errors, issue most likely in chosen samples/outputs + def test_regression_bcs() -> None: + """ + Regression: bcs + """ + inp = Input() + inp.add_marginals() + 
inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9]]) + outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]) + mm.pce_deg = 3 + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(mm.pce_deg)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='bcs') + + + def test_regression_bcsssparse() -> None: + """ + Regression: bcs and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) + outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='bcs', sparsity=True) + + +def test_regression_lars() -> None: + """ + Regression: lars + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) + outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='lars') + + +def test_regression_larsssparse() -> None: + """ + Regression: lars and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) + outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='lars', sparsity=True) + + +def test_regression_sgdr() -> None: + """ + Regression: sgdr + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='sgdr') + + +def test_regression_sgdrssparse() -> None: + """ + Regression: sgdr and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = 
mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='sgdr', sparsity=True) + + +if 0: # Could not figure out these errors, issue most likely in chosen samples/outputs + def test_regression_omp() -> None: + """ + Regression: omp + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8], [0.9], [1.0]]) + outputs = np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='omp') + + + def test_regression_ompssparse() -> None: + """ + Regression: omp and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='omp', sparsity=True) + + +def test_regression_vbl() -> None: + """ + Regression: vbl + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='vbl') + + +def test_regression_vblssparse() -> None: + """ + Regression: vbl and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='vbl', sparsity=True) + + +def test_regression_ebl() -> None: + """ + Regression: ebl + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = create_psi(BasisIndices, univ_bas) + + mm.regression(samples, outputs, psi, reg_method='ebl') + + +def test_regression_eblssparse() -> None: + """ + Regression: ebl and sparse + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + mm = PCE(inp) + samples = np.array([[0.2]]) + outputs = np.array([0.5]) + + mm.CollocationPoints = samples + mm.build_metamodel(n_init_samples=2) + BasisIndices = mm.allBasisIndices[str(1)][str(1.0)] + univ_bas = mm.univ_basis_vals(samples) + psi = 
+
+
+#%% Test Model.update_pce_coeffs
+
+# TODO: very linked to the actual training...
+
+#%% Test PCE.univ_basis_vals
+
+def test_univ_basis_vals() -> None:
+    """
+    Creates univariate polynomials
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    samples = np.array([[0.2], [0.8]])
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+    mm.univ_basis_vals(samples)
+
+
+#%% Test PCE.adaptive_regression
+
+def test_adaptive_regression_fewsamples() -> None:
+    """
+    Adaptive regression, no specific method, too few samples given
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    samples = np.array([[0.2]])
+    outputs = np.array([0.8])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+
+    # Evaluate the univariate polynomials on InputSpace
+    if mm.meta_model_type.lower() != 'gpe':
+        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+
+    with pytest.raises(AttributeError) as excinfo:
+        mm.adaptive_regression(outputs, 0)
+    assert str(excinfo.value) == ('There are too few samples for the corrected loo-cv error. Fit surrogate on at least as '
+                                  'many samples as parameters to use this')
+
+
+def test_adaptive_regression() -> None:
+    """
+    Adaptive regression, no specific method
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    samples = np.array([[0.0], [0.1]])
+    outputs = np.array([0.0, 0.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+
+    # Evaluate the univariate polynomials on InputSpace
+    if mm.meta_model_type.lower() != 'gpe':
+        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    mm.adaptive_regression(outputs, 0)
+
+
+def test_adaptive_regression_verbose() -> None:
+    """
+    Adaptive regression, no specific method, verbose output
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    samples = np.array([[0.0], [0.1]])
+    outputs = np.array([0.0, 0.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+
+    # Evaluate the univariate polynomials on InputSpace
+    if mm.meta_model_type.lower() != 'gpe':
+        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    mm.adaptive_regression(outputs, 0, True)
+
+
+def test_adaptive_regression_ols() -> None:
+    """
+    Adaptive regression, ols
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7], [0.8],
+                        [0.9], [1.0]])
+    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+
+    # Evaluate the univariate polynomials on InputSpace
+    if mm.meta_model_type.lower() != 'gpe':
+        mm.univ_p_val = mm.univ_basis_vals(mm.CollocationPoints)
+    mm.pce_reg_method = 'ols'
+    mm.adaptive_regression(outputs, 0)
+
+
+#%% Test PCE.pca_transformation
+
+def test_pca_transformation() -> None:
+    """
+    Apply PCA
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    mm.pca_transformation(outputs, 1)
+
+
+def test_pca_transformation_varcomp() -> None:
+    """
+    Apply PCA with set var_pca_threshold
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    outputs = np.array([[0.4, 0.4], [0.5, 0.6]])
+    mm.var_pca_threshold = 1
+    mm.pca_transformation(outputs, 1)
+
+
+
+#%% Test PCE.eval_metamodel
+
+def test_eval_metamodel() -> None:
+    """
+    Eval trained PCE
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.out_names = ['Z']
+    mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]})
+    mm.eval_metamodel([[0.4]])
+
+
+def test_eval_metamodel_normalboots() -> None:
+    """
+    Eval trained PCE with normal bootstrap
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.bootstrap_method = 'normal'
+    mm.out_names = ['Z']
+    mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]})
+    mm.eval_metamodel([[0.4]])
+
+
+def test_eval_metamodel_highnormalboots() -> None:
+    """
+    Eval trained PCE with higher bootstrap-itrs
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.n_bootstrap_itrs = 2
+    mm.out_names = ['Z']
+    mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]})
+    mm.eval_metamodel([[0.4]])
+
+
+def test_eval_metamodel_pca() -> None:
+    """
+    Eval trained PCE with pca
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.dim_red_method = 'pca'
+    mm.out_names = ['Z']
+    mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]})
+    mm.eval_metamodel([[0.4]])
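+
+
+# The eval_metamodel tests above are smoke tests only. A possible follow-up check
+# (sketch only, kept as a comment): assuming eval_metamodel returns a (mean, std) pair
+# of dicts keyed by out_names, which is an assumption about the API rather than
+# something exercised in this file, the predictions could be checked explicitly:
+#     mean, std = mm.eval_metamodel([[0.4]])
+#     assert 'Z' in mean and 'Z' in std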
+
+
+#%% Test PCE.create_model_error
+# TODO: move model out of this function
+
+#%% Test PCE.eval_model_error
+# TODO: test create_model_error first
+
+#%% Test PCE.auto_vivification
+def test_auto_vivification() -> None:
+    """
+    Creation of auto-vivification objects
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.auto_vivification()
+
+
+#%% Test PCE.copy_meta_model_opts
+
+def test_copy_meta_model_opts() -> None:
+    """
+    Copy the PCE with just some stats
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.add_InputSpace()
+    mm.copy_meta_model_opts()
+
+
+#%% Test PCE.__select_degree
+
+#%% Test Engine._compute_moments
+
+def test__compute_moments() -> None:
+    """
+    Compute moments of a pce-surrogate
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.fit([[0.2], [0.4], [0.8]], {'Z': [[0.4], [0.2], [0.5]]})
+    mm._compute_moments()
+
+
+def test__compute_moments_pca() -> None:
+    """
+    Compute moments of a pce-surrogate with pca
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = PCE(inp)
+    mm.dim_red_method = 'pca'
+    mm.fit([[0.2], [0.8]], {'Z': [[0.4, 0.4], [0.5, 0.6]]})
+    mm._compute_moments()
+
+
+#%% Test PCE.update_metamodel
+# TODO: taken from engine
diff --git a/tests/test_supplementary.py b/tests/test_supplementary.py
new file mode 100644
index 000000000..7537b0ca3
--- /dev/null
+++ b/tests/test_supplementary.py
@@ -0,0 +1,162 @@
+# -*- coding: utf-8 -*-
+"""
+Test the supplementary functions in bayesvalidrox.
+Tests are available for the following functions
+    hellinger_distance
+    logpdf
+    subdomain
+    create_psi x
+    corr_loocv_error x
+    gaussian_process_emulator x
+
+"""
+import numpy as np
+import pytest
+import sys
+
+sys.path.append("src/")
+
+from bayesvalidrox.surrogate_models.inputs import Input
+from bayesvalidrox.surrogate_models.input_space import InputSpace
+from bayesvalidrox.surrogate_models.surrogate_models import MetaModel
+from bayesvalidrox.surrogate_models.polynomial_chaos import PCE
+from bayesvalidrox.surrogate_models.supplementary import gaussian_process_emulator, corr_loocv_error, create_psi
+
+
+#%% Test create_psi
+
+def test_create_psi() -> None:
+    """
+    Create psi-matrix
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = MetaModel(inp)
+    samples = np.array([[0.2], [0.8]])
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+    BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
+    univ_bas = mm.univ_basis_vals(samples)
+    create_psi(BasisIndices, univ_bas)
+
+#%% Test corr_loocv_error
+
+def test_corr_loocv_error_nosparse() -> None:
+    """
+    Corrected loocv error
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = MetaModel(inp)
+    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7],
+                        [0.8], [0.9], [1.0]])
+    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+    BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
+    univ_bas = mm.univ_basis_vals(samples)
+    psi = create_psi(BasisIndices, univ_bas)
+
+    outs = mm.regression(samples, outputs, psi, reg_method='ebl')
+    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                     outputs)
+
+
+def test_corr_loocv_error_singley() -> None:
+    """
+    Corrected loocv error for a single output value
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = MetaModel(inp)
+    samples = np.array([[0.2]])
+    outputs = np.array([0.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+    BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
+    univ_bas = mm.univ_basis_vals(samples)
+    psi = create_psi(BasisIndices, univ_bas)
+
+    outs = mm.regression(samples, outputs, psi, reg_method='ols')
+    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                     outputs)
+
+
+def test_corr_loocv_error_sparse() -> None:
+    """
+    Corrected loocv error from sparse results
+    """
+    inp = Input()
+    inp.add_marginals()
+    inp.Marginals[0].dist_type = 'normal'
+    inp.Marginals[0].parameters = [0, 1]
+    mm = MetaModel(inp)
+    samples = np.array([[0.0], [0.1], [0.2], [0.3], [0.4], [0.5], [0.6], [0.7],
+                        [0.8], [0.9], [1.0]])
+    outputs = np.array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.1])
+
+    mm.CollocationPoints = samples
+    mm.build_metamodel(n_init_samples=2)
+    BasisIndices = mm.allBasisIndices[str(1)][str(1.0)]
+    univ_bas = mm.univ_basis_vals(samples)
+    psi = create_psi(BasisIndices, univ_bas)
+
+    outs = mm.regression(samples, outputs, psi, reg_method='ebl',
+                         sparsity=True)
+    corr_loocv_error(outs['clf_poly'], outs['sparePsi'], outs['coeffs'],
+                     outputs)
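+
+
+# The three corr_loocv_error tests above share the same marginal and metamodel setup. A
+# hypothetical helper along these lines could factor that out (sketch only, kept as a
+# comment; the name _build_psi is not part of the module and simply reuses calls from
+# the tests above):
+#     def _build_psi(mm, samples):
+#         mm.CollocationPoints = samples
+#         mm.build_metamodel(n_init_samples=2)
+#         basis_indices = mm.allBasisIndices[str(1)][str(1.0)]
+#         return create_psi(basis_indices, mm.univ_basis_vals(samples))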
+ """ + Create GPE + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5]) + + +def test_gaussian_process_emulator_nug() -> None: + """ + Create GPEwith nugget + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], nug_term=1.0) + + +def test_gaussian_process_emulator_autosel() -> None: + """ + Fit MetaModel with autoselect + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], autoSelect=True) + + +def test_gaussian_process_emulator_varidx() -> None: + """ + Create GPE with var_idx + """ + inp = Input() + inp.add_marginals() + inp.Marginals[0].dist_type = 'normal' + inp.Marginals[0].parameters = [0, 1] + gaussian_process_emulator([[0.2], [0.8]], [0.4, 0.5], varIdx=1) + -- GitLab