From aeb08a74ad290542bbc178253d9574da8ac6e482 Mon Sep 17 00:00:00 2001
From: faridm69 <faridmohammadi69@gmail.com>
Date: Thu, 19 Nov 2020 16:01:09 +0100
Subject: [PATCH] [BayesInference] Clean up.

Move _logpdf ahead of get_Sample and assemble the likelihood covariance
matrix with np.diag in BayesInference.normpdf, build the default emcee
KDEMove inside the MCMC constructor instead of in the signature, store
the parameter dimension on ExpDesigns and relabel arbitrary marginals as
uniform when a Kolmogorov-Smirnov test supports it, and remove the
active-terms handling from PCE_create_Psi and its prediction call.
---
 .../BayesInference/BayesInference.py          | 45 ++++++++-----------
 BayesValidRox/BayesInference/MCMC.py          |  6 +--
 BayesValidRox/surrogate_models/ExpDesigns.py  |  8 ++--
 .../surrogate_models/surrogate_models.py      | 12 +----
 4 files changed, 28 insertions(+), 43 deletions(-)

diff --git a/BayesValidRox/BayesInference/BayesInference.py b/BayesValidRox/BayesInference/BayesInference.py
index 70404e348..f5a122ed1 100644
--- a/BayesValidRox/BayesInference/BayesInference.py
+++ b/BayesValidRox/BayesInference/BayesInference.py
@@ -86,8 +86,17 @@ class BayesInference:
         self.BootstrapNoise = 0.05
         self.MultiProcessMCMC = None
         
-        self.Corner_title_fmt = '.3f' #'.4e'
-        
+        self.Corner_title_fmt = '.3f'
+    
+    #--------------------------------------------------------------------------------------------------------
+    def _logpdf(self, x, mean, cov):
+        n = len(mean)
+        L = spla.cholesky(cov, lower=True)
+        beta = np.sum(np.log(np.diag(L)))
+        dev = x - mean
+        alpha = dev.dot(spla.cho_solve((L, True), dev))
+        return -0.5 * alpha - beta - n / 2. * np.log(2 * np.pi)
+    
     #--------------------------------------------------------------------------------------------------------
     def get_Sample(self):
         
@@ -144,14 +153,12 @@ class BayesInference:
     #--------------------------------------------------------------------------------------------------------
     
     def normpdf(self, Outputs, Data, TotalSigma2s, Sigma2=None):
-        
+
         Model = self.PCEModel.ModelObj
-        NofMeasurements = TotalSigma2s.shape[0]
         
         # Covariance Matrix 
-        covMatrix = np.zeros((NofMeasurements, NofMeasurements), float)
-        np.fill_diagonal(covMatrix, TotalSigma2s)
-        
+        covMatrix = np.diag(TotalSigma2s)
+
         # Extract the requested model outputs for likelihood calculation
         if self.ReqOutputType is None: OutputType = Model.Output.Names 
         else: 
@@ -161,30 +168,22 @@ class BayesInference:
         
         # Flatten the Output
         TotalOutputs = np.empty((SampleSize,0))
-        biasSigma2s = np.empty((0))
+        if Sigma2 is not None: biasSigma2s = np.empty((0))
         for idx, outputType in enumerate(OutputType):
             TotalOutputs = np.hstack((TotalOutputs, Outputs[outputType]))
             if Sigma2 is not None:
                 biasSigma2s = np.hstack((biasSigma2s, Sigma2[idx] * np.ones(shape=Outputs[outputType].shape[1])))
         
         # Add Sigma2 to covMatrix
-        if Sigma2 is not None:
-            covBias = np.zeros((NofMeasurements, NofMeasurements), float)
-            np.fill_diagonal(covBias, biasSigma2s)
-            covMatrix += covBias
-            
+        if Sigma2 is not None: covMatrix += np.diag(biasSigma2s)
+
         # Add the std of the PCE if it is chosen as the emulator.
         if self.emulator:
-            covMatrix_PCE = np.zeros((NofMeasurements, NofMeasurements), float)
             stdPCE = np.empty((SampleSize,0))
             for outputType in OutputType:
                 stdPCE = np.hstack((stdPCE, self.stdPCEPriorPred[outputType]))
-
             # Expected value of the variance (assumption: i.i.d. stds)
-            varPCE = np.mean(stdPCE**2, axis=0)
-            # varPCE = np.var(stdPCE, axis=1)
-            np.fill_diagonal(covMatrix_PCE, varPCE)
-            covMatrix += covMatrix_PCE
+            covMatrix += np.diag(np.mean(stdPCE**2, axis=0))
         
         # Select the data points to compare
         if self.selectedIndices is not None:
@@ -197,14 +196,6 @@ class BayesInference:
             return self._logpdf(TotalOutputs[0], Data, covMatrix)
         else:
             return np.array([self._logpdf(output, Data, covMatrix) for output in TotalOutputs])
-
-    def _logpdf(self, x, mean, cov):
-        n = len(mean)
-        L = spla.cholesky(cov, lower=True)
-        beta = np.sum(np.log(np.diag(L)))
-        dev = x - mean
-        alpha = dev.dot(spla.cho_solve((L, True), dev))
-        return -0.5 * alpha - beta - n / 2. * np.log(2 * np.pi)
         
     #--------------------------------------------------------------------------------------------------------
     
diff --git a/BayesValidRox/BayesInference/MCMC.py b/BayesValidRox/BayesInference/MCMC.py
index 7dc0d8804..a297ccef8 100755
--- a/BayesValidRox/BayesInference/MCMC.py
+++ b/BayesValidRox/BayesInference/MCMC.py
@@ -27,14 +27,14 @@ class MCMC():
     ntemps = 20
     """
     def __init__(self, BayesOpts, initsamples = None, nwalkers = 100, nburn = 100, 
-                 nsteps = 100000, moves = emcee.moves.KDEMove(), multiprocessing=False, verbose = False):
+                 nsteps = 100000, moves = None, multiprocessing=False, verbose = False):
         
         self.BayesOpts      = BayesOpts
         self.initsamples    = initsamples
         self.nwalkers       = nwalkers
         self.nburn          = nburn
         self.nsteps         = nsteps
-        self.moves          = moves
+        self.moves          = [(emcee.moves.KDEMove(), 1.0)] if moves is None else moves
         self.mp             = multiprocessing
         self.verbose        = verbose
         
@@ -292,11 +292,11 @@ class MCMC():
         ndimTheta = theta.ndim
         theta = theta if ndimTheta != 1 else theta.reshape((1,-1))
         nsamples = theta.shape[0]
-
         logprior = -np.inf*np.ones(nsamples)
 
         for i in range(nsamples):
             if self.check_ranges(theta[i], params_range):
+                
                 logprior[i] = np.log(priorDist.pdf(theta[i,:-nSigma2]))
 
                 # Check if bias term needs to be inferred
diff --git a/BayesValidRox/surrogate_models/ExpDesigns.py b/BayesValidRox/surrogate_models/ExpDesigns.py
index e2c47c3bf..8df252bf2 100644
--- a/BayesValidRox/surrogate_models/ExpDesigns.py
+++ b/BayesValidRox/surrogate_models/ExpDesigns.py
@@ -67,7 +67,8 @@ class ExpDesigns:
 
         """
         Inputs = self.InputObj
-        NofPa = len(Inputs.Marginals) 
+        NofPa = len(Inputs.Marginals)
+        self.ndim = NofPa
         self.SamplingMethod = SamplingMethod
         NrSamples = int(NrSamples)
         self.arbitrary = False if len(Inputs.Marginals[0].InputValues) == 0 else True
@@ -76,7 +77,6 @@ class ExpDesigns:
             for i in range(NofPa):
                 Inputs.Marginals[i].Parameters = [np.min(Inputs.Marginals[i].InputValues),np.max(Inputs.Marginals[i].InputValues)]
         
-        
         ## Create a multivariate probability distribution
         self.JDist, self.Polytypes = self.DistConstructor()
         
@@ -166,7 +166,9 @@ class ExpDesigns:
             elif DistType is None:
                 polytype = 'arbitrary'
                 Dist = None
-                
+                data = Inputs.Marginals[parIdx].InputValues
+                result = st.kstest(data, st.uniform(loc=np.min(data), scale=np.max(data)-np.min(data)).cdf)
+                if result[1] > 0.05: Inputs.Marginals[parIdx].Dist = 'unif'
             else:
                 raise ValueError('DistType %s for parameter %s is not available.'%(DistType,parIdx+1))
             
diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py
index a8bc70145..3ddb358fe 100644
--- a/BayesValidRox/surrogate_models/surrogate_models.py
+++ b/BayesValidRox/surrogate_models/surrogate_models.py
@@ -410,7 +410,7 @@ class Metamodel:
             
             return univ_vals
     #--------------------------------------------------------------------------------------------------------
-    def PCE_create_Psi(self,BasisIndices,univ_p_val, active=None):
+    def PCE_create_Psi(self,BasisIndices,univ_p_val):
         """
         This function assembles the design matrix Psi from the given basis index 
         set INDICES and the univariate polynomial evaluations univ_p_val.
@@ -439,7 +439,6 @@ class Metamodel:
         # Assemble the Psi matrix
         for m in range(BasisIndices.shape[1]):
             aa = np.where(BasisIndices[:,m]>0)[0] 
-            if active is not None: aa = list(set(aa).intersection(active)) # Take active terms
             try:
                 basisIdx = BasisIndices[aa,m]
                 Psi[:,aa] = np.multiply(Psi[:,aa] , np.reshape(univ_p_val[:, m, basisIdx], Psi[:,aa].shape))
@@ -2178,15 +2177,8 @@ class Metamodel:
                     y_mean, y_std = gp.predict(self.Samples, return_std=True)
 
                 else: # Prediction with PCE or pcekriging
-                    # Find active terms
-                    try:
-                        active_terms = np.nonzero(self.clf_poly[Outkey][Inkey].coef_)[0]
-                    except:
-                        active_terms = np.nonzero(self.CoeffsDict[Outkey][Inkey].coef_)[0]
-                    
                     # Assemble Psi matrix
-                    PSI_Val = self.PCE_create_Psi(self.BasisDict[Outkey][Inkey], univ_p_val,
-                                                  active=active_terms)
+                    PSI_Val = self.PCE_create_Psi(self.BasisDict[Outkey][Inkey], univ_p_val)
 
                     # Prediction
                     try: # with error bar
-- 
GitLab
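
Aside (not part of the patch): a minimal standalone sketch of the
Cholesky-based Gaussian log-density that the relocated _logpdf helper
implements, cross-checked against scipy.stats.multivariate_normal; the
variable names and the diagonal test covariance below are illustrative
only, not taken from the codebase.

    import numpy as np
    import scipy.linalg as spla
    from scipy.stats import multivariate_normal

    def logpdf(x, mean, cov):
        # Multivariate normal log-density via the Cholesky factor of cov.
        n = len(mean)
        L = spla.cholesky(cov, lower=True)                # cov = L @ L.T
        beta = np.sum(np.log(np.diag(L)))                 # 0.5 * log|cov|
        dev = x - mean
        alpha = dev.dot(spla.cho_solve((L, True), dev))   # dev^T cov^-1 dev
        return -0.5 * alpha - beta - n / 2. * np.log(2 * np.pi)

    rng = np.random.default_rng(0)
    mean = rng.normal(size=4)
    cov = np.diag(rng.uniform(0.1, 1.0, size=4))          # diagonal, as in normpdf
    x = rng.normal(size=4)
    assert np.isclose(logpdf(x, mean, cov),
                      multivariate_normal(mean, cov).logpdf(x))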