diff --git a/src/bayesvalidrox/bayes_inference/bayes_inference.py b/src/bayesvalidrox/bayes_inference/bayes_inference.py index 85f68c4aa3422afa8989d50c43e3f4f8063fd10d..3ba266cb9b3d10f90a80d4d2b7dd7484e6cf9e6f 100644 --- a/src/bayesvalidrox/bayes_inference/bayes_inference.py +++ b/src/bayesvalidrox/bayes_inference/bayes_inference.py @@ -1034,10 +1034,10 @@ class BayesInference: axes = np.array(figPosterior.axes).reshape((len(par_names), len(par_names))) for yi in range(len(par_names)): ax = axes[yi, yi] - ax.set_xlim(PCEModel.BoundTuples[yi]) + ax.set_xlim(PCEModel.bound_tuples[yi]) for xi in range(yi): ax = axes[yi, xi] - ax.set_xlim(PCEModel.BoundTuples[xi]) + ax.set_xlim(PCEModel.bound_tuples[xi]) # Extract the axes # axes = np.array(figPosterior.axes).reshape((NofPa, NofPa)) diff --git a/src/bayesvalidrox/bayes_inference/mcmc.py b/src/bayesvalidrox/bayes_inference/mcmc.py index 1c918be1a7004721eae339d7cad4e0d3d48e29a2..4edf8be7220a78e1a6b657062205c4258e648bbd 100755 --- a/src/bayesvalidrox/bayes_inference/mcmc.py +++ b/src/bayesvalidrox/bayes_inference/mcmc.py @@ -95,16 +95,16 @@ class MCMC(): # Pick samples based on a uniform dist between min and max of # each dim initsamples = np.zeros((self.nwalkers, ndim)) - BoundTuples = [] + bound_tuples = [] for idxDim in range(ndim): lower = np.min(self.initsamples[:, idxDim]) upper = np.max(self.initsamples[:, idxDim]) - BoundTuples.append((lower, upper)) + bound_tuples.append((lower, upper)) dist = st.uniform(loc=lower, scale=upper-lower) initsamples[:, idxDim] = dist.rvs(size=self.nwalkers) # Update lower and upper - PCEModel.ExpDesign.BoundTuples = BoundTuples + PCEModel.ExpDesign.bound_tuples = bound_tuples # Check if sigma^2 needs to be inferred if Discrepancy.optSigma != 'B': @@ -116,8 +116,8 @@ class MCMC(): # Update ndim ndim = initsamples.shape[1] - # Update BoundTuples - PCEModel.ExpDesign.BoundTuples += Discrepancy.ExpDesign.BoundTuples + # Update bound_tuples + PCEModel.ExpDesign.bound_tuples += Discrepancy.ExpDesign.bound_tuples
print("\n>>>> Bayesian inference with MCMC for " f"{self.BayesOpts.Name} started. <<<<<<") @@ -278,7 +278,7 @@ class MCMC(): for i in range(len(Discrepancy.InputDisc.Marginals)): par_names.append(Discrepancy.InputDisc.Marginals[i].Name) - params_range = PCEModel.ExpDesign.BoundTuples + params_range = PCEModel.ExpDesign.bound_tuples # Plot traces if self.verbose and self.nsteps < 10000: @@ -358,9 +358,9 @@ class MCMC(): PCEModel = self.BayesOpts.PCEModel Discrepancy = self.BayesOpts.Discrepancy - nSigma2 = len(Discrepancy.ExpDesign.BoundTuples) if Discrepancy.optSigma != 'B' else -len(theta) + nSigma2 = len(Discrepancy.ExpDesign.bound_tuples) if Discrepancy.optSigma != 'B' else -len(theta) priorDist = PCEModel.ExpDesign.priorSpace - params_range = PCEModel.ExpDesign.BoundTuples + params_range = PCEModel.ExpDesign.bound_tuples ndimTheta = theta.ndim theta = theta if ndimTheta != 1 else theta.reshape((1,-1)) nsamples = theta.shape[0] @@ -379,7 +379,7 @@ class MCMC(): # Check if bias term needs to be inferred if Discrepancy.optSigma != 'B': if self.check_ranges(theta[i,-nSigma2:], - Discrepancy.ExpDesign.BoundTuples): + Discrepancy.ExpDesign.bound_tuples): if all('unif' in Discrepancy.ExpDesign.InputObj.Marginals[i].DistType for i in \ range(Discrepancy.ExpDesign.ndim)): logprior[i] = 0.0 @@ -397,7 +397,7 @@ class MCMC(): PCEModel = BayesOpts.PCEModel Discrepancy = self.BayesOpts.Discrepancy if Discrepancy.optSigma != 'B': - nSigma2 = len(Discrepancy.ExpDesign.BoundTuples) + nSigma2 = len(Discrepancy.ExpDesign.bound_tuples) else: nSigma2 = -len(theta) # Check if bias term needs to be inferred