diff --git a/examples/OHagan-function/test_OHagan.py b/examples/OHagan-function/test_OHagan.py
index 4715ecc525c2a016e078753b63016e8705a77888..2fc629d3423d7ef3312e0182a8f15d1f0fd18ba0 100644
--- a/examples/OHagan-function/test_OHagan.py
+++ b/examples/OHagan-function/test_OHagan.py
@@ -1,7 +1,8 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 """
-This test deals with the surrogate modeling of a Ishigami function.
+This test deals with the surrogate modeling of the O'Hagan function with 15
+parameters.
 
 You will see how to:
     Check the quality of your regression model
@@ -30,6 +31,7 @@ sys.path.append("../../src/bayesvalidrox/")
 from pylink.pylink import PyLinkForwardModel
 from surrogate_models.inputs import Input
 from surrogate_models.surrogate_models import MetaModel
+from surrogate_models.meta_model_engine import MetaModelEngine
 from post_processing.post_processing import PostProcessing
 from bayes_inference.bayes_inference import BayesInference
 from bayes_inference.discrepancy import Discrepancy
@@ -87,11 +89,6 @@ if __name__ == "__main__":
     # 9)EBL: Emperical Bayesian Learning
     MetaModelOpts.pce_reg_method = 'FastARD'
 
-    # Bootstraping
-    # 1) normal 2) fast
-    MetaModelOpts.bootstrap_method = 'fast'
-    MetaModelOpts.n_bootstrap_itrs = 1#00
-
     # Specify the max degree to be compared by the adaptive algorithm:
     # The degree with the lowest Leave-One-Out cross-validation (LOO)
     # error (or the highest score=1-LOO)estimator is chosen as the final
@@ -99,7 +96,7 @@ if __name__ == "__main__":
     MetaModelOpts.pce_deg = 7
 
     # q-quasi-norm 0<q<1 (default=1)
-    MetaModelOpts.pce_q_norm = 0.5
+    MetaModelOpts.pce_q_norm = 0.65
 
     # Print summary of the regression results
     # MetaModelOpts.verbose = True
@@ -116,10 +113,10 @@ if __name__ == "__main__":
     # Sampling methods
     # 1) random 2) latin_hypercube 3) sobol 4) halton 5) hammersley 6) korobov
     # 7) chebyshev(FT) 8) grid(FT) 9) nested_grid(FT) 10)user
-    MetaModelOpts.ExpDesign.sampling_method = 'user'
+    MetaModelOpts.ExpDesign.sampling_method = 'latin_hypercube'
 
     # Provide the experimental design object with a hdf5 file
-    MetaModelOpts.ExpDesign.hdf5_file = 'ExpDesign_OHagan_orig.hdf5'
+    # MetaModelOpts.ExpDesign.hdf5_file = 'ExpDesign_OHagan_orig.hdf5'
 
     # Sequential experimental design (needed only for sequential ExpDesign)
     MetaModelOpts.ExpDesign.n_new_samples = 1
@@ -166,9 +163,6 @@ if __name__ == "__main__":
         'Z': np.load("data/valid_outputs.npy")
     }
     # >>>>>>>>>>>>>>>>>>>>>> Build Surrogate <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
-    # Adaptive sparse arbitrary polynomial chaos expansion
-    # PCEModel = MetaModelOpts.create_metamodel()
-    from surrogate_models.meta_model_engine import MetaModelEngine
     meta_model_engine = MetaModelEngine(MetaModelOpts)
     meta_model_engine.run()
     PCEModel = meta_model_engine.MetaModel
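Note: the borehole and ishigami examples below make the same structural change as test_OHagan.py above: the commented-out MetaModelOpts.create_metamodel() call is dropped, the MetaModelEngine import moves to the top of the script, and the surrogate is built through the engine. A minimal sketch of that pattern, using only names that appear in these hunks (MetaModelOpts is the MetaModel object configured earlier in each script and is assumed here, not shown):

    from surrogate_models.meta_model_engine import MetaModelEngine

    # MetaModelOpts: the configured MetaModel instance (regression method,
    # experimental design, validation data) from the unchanged part of the script
    meta_model_engine = MetaModelEngine(MetaModelOpts)
    meta_model_engine.run()  # builds the surrogate; also drives the sequential
                             # design when ExpDesign.method = 'sequential'
    PCEModel = meta_model_engine.MetaModel  # trained surrogate used later on
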
diff --git a/examples/borehole/borehole.py b/examples/borehole/borehole.py
index 43dd71f21904a7ca7bec4e45a48fd3a37cc33879..b2a0dcd4a08091e05b74cceae3eda3709308ae49 100644
--- a/examples/borehole/borehole.py
+++ b/examples/borehole/borehole.py
@@ -1,9 +1,16 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 """
-Created on Mon Nov 19 08:56:21 2018
+Author: Farid Mohammadi, M.Sc.
+E-Mail: farid.mohammadi@iws.uni-stuttgart.de
+Department of Hydromechanics and Modelling of Hydrosystems (LH2)
+Institute for Modelling Hydraulic and Environmental Systems (IWS), University
+of Stuttgart, www.iws.uni-stuttgart.de/lh2/
+Pfaffenwaldring 61
+70569 Stuttgart
+
+Created on Mon Sep 12 2022
 
-@author: farid
 """
 
 import numpy as np
diff --git a/examples/borehole/test_borehole.py b/examples/borehole/test_borehole.py
index 3097b78bc8fd221d18f254b2426b9fdff00987e8..e28e9a30f76afa9deb158e23532af12623f2d8e2 100644
--- a/examples/borehole/test_borehole.py
+++ b/examples/borehole/test_borehole.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 """
-This test deals with the surrogate modeling of a Ishigami function.
+This test deals with the surrogate modeling of a Borehole function.
 
 You will see how to:
     Check the quality of your regression model
@@ -15,7 +15,7 @@ of Stuttgart, www.iws.uni-stuttgart.de/lh2/
 Pfaffenwaldring 61
 70569 Stuttgart
 
-Created on Wed Jul 10 2019
+Created on Sep 12 2022
 
 """
 
@@ -33,6 +33,8 @@ from surrogate_models.surrogate_models import MetaModel
 from post_processing.post_processing import PostProcessing
 from bayes_inference.bayes_inference import BayesInference
 from bayes_inference.discrepancy import Discrepancy
+from surrogate_models.meta_model_engine import MetaModelEngine
+
 import matplotlib
 matplotlib.use('agg')
 
@@ -127,11 +129,6 @@ if __name__ == "__main__":
     # 9)EBL: Emperical Bayesian Learning
     MetaModelOpts.pce_reg_method = 'OMP'
 
-    # Bootstraping
-    # 1) normal 2) fast
-    MetaModelOpts.bootstrap_method = 'fast'
-    MetaModelOpts.n_bootstrap_itrs = 1#00
-
     # Specify the max degree to be compared by the adaptive algorithm:
     # The degree with the lowest Leave-One-Out cross-validation (LOO)
     # error (or the highest score=1-LOO)estimator is chosen as the final
@@ -163,7 +160,7 @@ if __name__ == "__main__":
 
     # Sequential experimental design (needed only for sequential ExpDesign)
     MetaModelOpts.ExpDesign.n_new_samples = 1
-    MetaModelOpts.ExpDesign.n_max_samples = 300  # 150
+    MetaModelOpts.ExpDesign.n_max_samples = 300
     MetaModelOpts.ExpDesign.mod_LOO_threshold = 1e-16
 
     # ------------------------------------------------
@@ -206,9 +203,7 @@ if __name__ == "__main__":
         'flow rate [m$^3$/yr]': np.load("data/valid_outputs.npy")
     }
     # >>>>>>>>>>>>>>>>>>>>>> Build Surrogate <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
-    # Adaptive sparse arbitrary polynomial chaos expansion
-    # PCEModel = MetaModelOpts.create_metamodel()
-    from surrogate_models.meta_model_engine import MetaModelEngine
+    # Use MetaModelEngine for sequential experimental design
    meta_model_engine = MetaModelEngine(MetaModelOpts)
     meta_model_engine.run()
     PCEModel = meta_model_engine.MetaModel
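The hunks above and below repeatedly touch the experimental-design block (test_ishigami.py below switches from 'sequential' to 'normal', while test_borehole.py above keeps the sequential knobs). For orientation, a minimal sketch of the two configurations, built only from attribute names and values that already appear in these diffs; it is an illustration, not the scripts' exact code:

    # after MetaModelOpts (a MetaModel instance) has been created:
    MetaModelOpts.add_ExpDesign()

    # One-shot ('normal') design: all training samples are drawn up front
    MetaModelOpts.ExpDesign.method = 'normal'
    MetaModelOpts.ExpDesign.n_init_samples = 50
    MetaModelOpts.ExpDesign.sampling_method = 'latin_hypercube'

    # Sequential adaptive design: start from the initial set and add samples
    # until n_max_samples or the LOO threshold is reached
    # MetaModelOpts.ExpDesign.method = 'sequential'
    # MetaModelOpts.ExpDesign.n_new_samples = 1
    # MetaModelOpts.ExpDesign.n_max_samples = 300
    # MetaModelOpts.ExpDesign.mod_LOO_threshold = 1e-16
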
diff --git a/examples/ishigami/test_ishigami.py b/examples/ishigami/test_ishigami.py
index c7db9c4bfb8fe94cb194043836138162624949e2..313ad38280acfe6bf3a4b8c29b1f94d74da399b7 100644
--- a/examples/ishigami/test_ishigami.py
+++ b/examples/ishigami/test_ishigami.py
@@ -30,6 +30,7 @@ sys.path.append("../../src/bayesvalidrox/")
 from pylink.pylink import PyLinkForwardModel
 from surrogate_models.inputs import Input
 from surrogate_models.surrogate_models import MetaModel
+from surrogate_models.meta_model_engine import MetaModelEngine
 from post_processing.post_processing import PostProcessing
 from bayes_inference.bayes_inference import BayesInference
 from bayes_inference.discrepancy import Discrepancy
@@ -94,11 +95,6 @@ if __name__ == "__main__":
     # 9)EBL: Emperical Bayesian Learning
     MetaModelOpts.pce_reg_method = 'BCS'
 
-    # Bootstraping
-    # 1) normal 2) fast
-    MetaModelOpts.bootstrap_method = 'fast'
-    MetaModelOpts.n_bootstrap_itrs = 100
-
     # Specify the max degree to be compared by the adaptive algorithm:
     # The degree with the lowest Leave-One-Out cross-validation (LOO)
     # error (or the highest score=1-LOO)estimator is chosen as the final
@@ -117,7 +113,7 @@ if __name__ == "__main__":
     MetaModelOpts.add_ExpDesign()
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
-    MetaModelOpts.ExpDesign.method = 'sequential'
+    MetaModelOpts.ExpDesign.method = 'normal'
     MetaModelOpts.ExpDesign.n_init_samples = 50
 
     # Sampling methods
@@ -171,9 +167,6 @@ if __name__ == "__main__":
     MetaModelOpts.valid_samples = np.load("data/valid_samples.npy")
     MetaModelOpts.valid_model_runs = {'Z': np.load("data/valid_outputs.npy")}
     # >>>>>>>>>>>>>>>>>>>>>> Build Surrogate <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
-    # Adaptive sparse arbitrary polynomial chaos expansion
-    # PCEModel = MetaModelOpts.create_metamodel()
-    from surrogate_models.meta_model_engine import MetaModelEngine
     meta_model_engine = MetaModelEngine(MetaModelOpts)
     meta_model_engine.run()
     PCEModel = meta_model_engine.MetaModel
diff --git a/examples/model-comparison/test_model_comparison.py b/examples/model-comparison/test_model_comparison.py
index b353cca28ce153a1c5f738f83e710a2a1dc88a08..d5f523871d99b5115437c2f66e8a1ce2a845f503 100644
--- a/examples/model-comparison/test_model_comparison.py
+++ b/examples/model-comparison/test_model_comparison.py
@@ -254,22 +254,7 @@ if __name__ == "__main__":
         "cosine": NL4_MetaModel
     }
 
-    # Option I: MCMC inference method
-    mcmc_params = {
-        'n_steps': 1e5,
-        'n_walkers': 30,
-        'multiprocessing': False,
-        'verbose': False
-    }
-    opts_mcmc = {
-        "inference_method": "MCMC",
-        "mcmc_params": mcmc_params,
-        "Discrepancy": DiscrepancyOpts,
-        "emulator": True,
-        "plot_post_pred": True
-    }
-
-    # Option II: BME Bootstrap
+    # BME Bootstrap options
     opts_bootstrap = {
         "bootstrap": True,
         "n_samples": 10000,
diff --git a/examples/pollution/test_pollution.py b/examples/pollution/test_pollution.py
index cbb897481121a75c8bb7d604abd6e8e3cbff5036..f2de52e2caf0196d8d05a2035950e2f28ff8fc70 100644
--- a/examples/pollution/test_pollution.py
+++ b/examples/pollution/test_pollution.py
@@ -21,7 +21,6 @@ import numpy as np
 import pandas as pd
 import joblib
 
-
 # import bayesvalidrox
 # Add BayesValidRox path
 import sys
@@ -142,7 +141,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.method = 'normal'
-    MetaModelOpts.ExpDesign.n_init_samples = 150  # 5*ndim
+    MetaModelOpts.ExpDesign.n_init_samples = 150
 
     # Sampling methods
     # 1) random 2) latin_hypercube 3) sobol 4) halton 5) hammersley 6) korobov
diff --git a/examples/pollution/test_valid_pollution.py b/examples/pollution/test_valid_pollution.py
index a19e7b6a0f81b54e17e51f7bcfa5d8adfdd7dde0..e2aacbe5d2186c38287e552ae588332150dd519b 100644
--- a/examples/pollution/test_valid_pollution.py
+++ b/examples/pollution/test_valid_pollution.py
@@ -18,10 +18,8 @@ Created on Fri Aug 9 2019
 """
 
 import numpy as np
-import pandas as pd
 import joblib
 
-
 # import bayesvalidrox
 # Add BayesValidRox path
 import sys
@@ -129,7 +127,7 @@ if __name__ == "__main__":
     # The degree with the lowest Leave-One-Out cross-validation (LOO)
     # error (or the highest score=1-LOO)estimator is chosen as the final
     # metamodel. pce_deg accepts degree as a scalar or a range.
-    MetaModelOpts.pce_deg = np.arange(6)
+    MetaModelOpts.pce_deg = 6
 
     # q-quasi-norm 0<q<1 (default=1)
     # MetaModelOpts.pce_q_norm = 0.75
@@ -142,7 +140,7 @@ if __name__ == "__main__":
 
     # One-shot (normal) or Sequential Adaptive (sequential) Design
     MetaModelOpts.ExpDesign.method = 'normal'
-    MetaModelOpts.ExpDesign.n_init_samples = 150  # 5*ndim
+    MetaModelOpts.ExpDesign.n_init_samples = 150
 
     # Sampling methods
     # 1) random 2) latin_hypercube 3) sobol 4) halton 5) hammersley 6) korobov
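The test_valid_pollution.py hunk above replaces np.arange(6) with a plain integer for pce_deg. As the surrounding comment notes, pce_deg accepts either a scalar or a range of candidate degrees; a short illustration of both forms (MetaModelOpts as configured in the scripts above is assumed):

    import numpy as np

    # a single maximum polynomial degree
    MetaModelOpts.pce_deg = 6

    # or a range of candidate degrees, compared via their LOO error
    # MetaModelOpts.pce_deg = np.arange(6)   # the form removed in the hunk above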