From 9f322f7aac5ca6b5025d076a4d775cea658a9e26 Mon Sep 17 00:00:00 2001
From: Farid Mohammadi <farid.mohammadi@iws.uni-stuttgart.de>
Date: Wed, 16 Feb 2022 11:51:58 +0100
Subject: [PATCH] [bayes_inference] update parameter names from ExpDesign
 object.

---
 .../bayes_inference/bayes_inference.py    |  4 ++--
 src/bayesvalidrox/bayes_inference/mcmc.py | 20 +++++++++----------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/src/bayesvalidrox/bayes_inference/bayes_inference.py b/src/bayesvalidrox/bayes_inference/bayes_inference.py
index 85f68c4aa..3ba266cb9 100644
--- a/src/bayesvalidrox/bayes_inference/bayes_inference.py
+++ b/src/bayesvalidrox/bayes_inference/bayes_inference.py
@@ -1034,10 +1034,10 @@ class BayesInference:
             axes = np.array(figPosterior.axes).reshape((len(par_names), len(par_names)))
             for yi in range(len(par_names)):
                 ax = axes[yi, yi]
-                ax.set_xlim(PCEModel.BoundTuples[yi])
+                ax.set_xlim(PCEModel.bound_tuples[yi])
                 for xi in range(yi):
                     ax = axes[yi, xi]
-                    ax.set_xlim(PCEModel.BoundTuples[xi])
+                    ax.set_xlim(PCEModel.bound_tuples[xi])
 
         # Extract the axes
         # axes = np.array(figPosterior.axes).reshape((NofPa, NofPa))
diff --git a/src/bayesvalidrox/bayes_inference/mcmc.py b/src/bayesvalidrox/bayes_inference/mcmc.py
index 1c918be1a..4edf8be72 100755
--- a/src/bayesvalidrox/bayes_inference/mcmc.py
+++ b/src/bayesvalidrox/bayes_inference/mcmc.py
@@ -95,16 +95,16 @@ class MCMC():
                 # Pick samples based on a uniform dist between min and max of
                 # each dim
                 initsamples = np.zeros((self.nwalkers, ndim))
-                BoundTuples = []
+                bound_tuples = []
                 for idxDim in range(ndim):
                     lower = np.min(self.initsamples[:, idxDim])
                     upper = np.max(self.initsamples[:, idxDim])
-                    BoundTuples.append((lower, upper))
+                    bound_tuples.append((lower, upper))
                     dist = st.uniform(loc=lower, scale=upper-lower)
                     initsamples[:, idxDim] = dist.rvs(size=self.nwalkers)
 
                 # Update lower and upper
-                PCEModel.ExpDesign.BoundTuples = BoundTuples
+                PCEModel.ExpDesign.bound_tuples = bound_tuples
 
         # Check if sigma^2 needs to be inferred
         if Discrepancy.optSigma != 'B':
@@ -116,8 +116,8 @@ class MCMC():
             # Update ndim
             ndim = initsamples.shape[1]
 
-            # Update BoundTuples
-            PCEModel.ExpDesign.BoundTuples += Discrepancy.ExpDesign.BoundTuples
+            # Update bound_tuples
+            PCEModel.ExpDesign.bound_tuples += Discrepancy.ExpDesign.bound_tuples
 
         print("\n>>>> Bayesian inference with MCMC for "
               f"{self.BayesOpts.Name} started. <<<<<<")
@@ -278,7 +278,7 @@ class MCMC():
             for i in range(len(Discrepancy.InputDisc.Marginals)):
                 par_names.append(Discrepancy.InputDisc.Marginals[i].Name)
 
-        params_range = PCEModel.ExpDesign.BoundTuples
+        params_range = PCEModel.ExpDesign.bound_tuples
 
         # Plot traces
         if self.verbose and self.nsteps < 10000:
@@ -358,9 +358,9 @@ class MCMC():
         PCEModel = self.BayesOpts.PCEModel
         Discrepancy = self.BayesOpts.Discrepancy
 
-        nSigma2 = len(Discrepancy.ExpDesign.BoundTuples) if Discrepancy.optSigma != 'B' else -len(theta)
+        nSigma2 = len(Discrepancy.ExpDesign.bound_tuples) if Discrepancy.optSigma != 'B' else -len(theta)
         priorDist = PCEModel.ExpDesign.priorSpace
-        params_range = PCEModel.ExpDesign.BoundTuples
+        params_range = PCEModel.ExpDesign.bound_tuples
         ndimTheta = theta.ndim
         theta = theta if ndimTheta != 1 else theta.reshape((1,-1))
         nsamples = theta.shape[0]
@@ -379,7 +379,7 @@ class MCMC():
             # Check if bias term needs to be inferred
             if Discrepancy.optSigma != 'B':
                 if self.check_ranges(theta[i,-nSigma2:],
-                                     Discrepancy.ExpDesign.BoundTuples):
+                                     Discrepancy.ExpDesign.bound_tuples):
                     if all('unif' in Discrepancy.ExpDesign.InputObj.Marginals[i].DistType for i in \
                            range(Discrepancy.ExpDesign.ndim)):
                         logprior[i] = 0.0
@@ -397,7 +397,7 @@ class MCMC():
        PCEModel = BayesOpts.PCEModel
        Discrepancy = self.BayesOpts.Discrepancy
        if Discrepancy.optSigma != 'B':
-           nSigma2 = len(Discrepancy.ExpDesign.BoundTuples)
+           nSigma2 = len(Discrepancy.ExpDesign.bound_tuples)
        else:
            nSigma2 = -len(theta)
        # Check if bias term needs to be inferred
-- 
GitLab