From 44e9a6cd0d65181bfc2a8ca5cd6c328e8179b247 Mon Sep 17 00:00:00 2001 From: Farid Mohammadi <farid.mohammadi@iws.uni-stuttgart.de> Date: Mon, 23 Mar 2020 18:14:02 +0100 Subject: [PATCH] [surrogate][seqDesign] modified utilities for Bayesian optimal design of experiment. --- .../surrogate_models/surrogate_models.py | 160 +++++++++++++----- .../AnalyticalFunctionComplex_Test.py | 16 +- .../AnalyticalFunction_Test.py | 16 +- 3 files changed, 130 insertions(+), 62 deletions(-) diff --git a/BayesValidRox/surrogate_models/surrogate_models.py b/BayesValidRox/surrogate_models/surrogate_models.py index b82458bc6..d35d8fe47 100644 --- a/BayesValidRox/surrogate_models/surrogate_models.py +++ b/BayesValidRox/surrogate_models/surrogate_models.py @@ -847,7 +847,7 @@ class aPCE: #-------------------------------------------------------------------------------------------------------- def util_BayesianDesign(self, X_can, Model, sigma2Dict, var='DKL'): - + SamplingMethod = 'random' @@ -860,27 +860,31 @@ class aPCE: oldExpDesignY = PCEModel.ExpDesign.Y - # Evaluate the PCE metamodels at that location ??? - Y_PC_can, _ = self.eval_PCEmodel(np.array([X_can])) + # # Evaluate the PCE metamodels at that location ??? + # Y_PC_can, _ = self.eval_PCEmodel(np.array([X_can])) - # Add all suggestion as new ExpDesign - NewExpDesignX = np.vstack((oldExpDesignX, X_can)) + # # Add all suggestion as new ExpDesign + # NewExpDesignX = np.vstack((oldExpDesignX, X_can)) - NewExpDesignY = {} - for key in oldExpDesignY.keys(): - try: - NewExpDesignY[key] = np.vstack((oldExpDesignY[key], Y_PC_can[key])) - except: - NewExpDesignY[key] = oldExpDesignY[key] + # NewExpDesignY = {} + # for key in oldExpDesignY.keys(): + # try: + # NewExpDesignY[key] = np.vstack((oldExpDesignY[key], Y_PC_can[key])) + # except: + # NewExpDesignY[key] = oldExpDesignY[key] - PCEModel.ExpDesign.SamplingMethod = 'user' - PCEModel.ExpDesign.X = NewExpDesignX - PCEModel.ExpDesign.Y = NewExpDesignY + # PCEModel.ExpDesign.SamplingMethod = 'user' + # PCEModel.ExpDesign.X = NewExpDesignX + # PCEModel.ExpDesign.Y = NewExpDesignY - # Create the SparseBayes-based PCE metamodel: - PCE_SparseBayes_can = PCEModel.create_PCE_normDesign(Model, OptDesignFlag=True) + # # Create the SparseBayes-based PCE metamodel: + # PCE_SparseBayes_can = PCEModel.create_PCE_normDesign(Model, OptDesignFlag=True) + PCE_SparseBayes_can = PCEModel + # Evaluate the PCE metamodels at that location ??? + Y_mean_can, Y_std_can = self.eval_PCEmodel(np.array([X_can])) + # Get the data ObservationData = self.Observations @@ -892,10 +896,27 @@ class aPCE: while ((ESS > MCsize) or (ESS < 1)): # Monte Carlo simulation for the candidate design - ExpDesign = ExpDesigns(self.Inputs) - X_MC = ExpDesign.GetSample(MCsize,self.MaxPceDegree, SamplingMethod); - Y_PC_MC, std_PC_MC = PCE_SparseBayes_can.eval_PCEmodel(X_MC) + # ExpDesign = ExpDesigns(self.Inputs) + # X_MC = ExpDesign.GetSample(MCsize,self.MaxPceDegree, SamplingMethod); + + # # Evaluate the PCEModel at the given samples + # Y_PC_MC, std_PC_MC = PCE_SparseBayes_can.eval_PCEmodel(X_MC) + + # Sample a distribution for a normal dist + # with Y_mean_can as the mean and Y_std_can as std. 
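# Illustrative sketch (not part of the patch): a standalone version of the
# step described in the comment above, i.e. drawing Monte Carlo realizations
# of the candidate response from a normal predictive distribution built from
# the PCE mean and standard deviation, with a diagonal covariance as used
# below. The function and variable names (sample_candidate_outputs,
# y_mean_can, y_std_can, n_mc) are illustrative, not BayesValidRox API.
import numpy as np
from scipy import stats

def sample_candidate_outputs(y_mean_can, y_std_can, n_mc=10000):
    """Return MC realizations per output key and the accumulated log prior density."""
    y_mc = {}
    log_prior = np.zeros(n_mc)
    for key, means in y_mean_can.items():
        means = np.atleast_1d(means)
        stds = np.atleast_1d(y_std_can[key])
        dist = stats.multivariate_normal(mean=means, cov=np.diag(stds**2))
        y_mc[key] = dist.rvs(size=n_mc)
        # a product of independent densities accumulates as a sum of log-densities
        log_prior += dist.logpdf(y_mc[key])
    return y_mc, log_prior

# usage sketch:
# y_mc, log_prior = sample_candidate_outputs({'Z': [1.0, 2.0]}, {'Z': [0.1, 0.2]})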
+ Y_PC_MC, std_PC_MC = {}, {} + logPriorLikelihoods = np.zeros((MCsize)) + for key in list(Y_mean_can): + means, stds = Y_mean_can[key][0], Y_std_can[key][0] + cov = np.zeros((means.shape[0], means.shape[0]), float) + np.fill_diagonal(cov, stds**2) + + multiNormalDensity = stats.multivariate_normal(mean=means, cov=cov) + Y_PC_MC[key] = multiNormalDensity.rvs(MCsize) + logPriorLikelihoods = np.multiply(logPriorLikelihoods, multiNormalDensity.logpdf(Y_PC_MC[key])) + #Y_PC_MC[key] = np.random.multivariate_normal(means, cov, MCsize) + std_PC_MC[key] = np.zeros((MCsize, means.shape[0])) # Likelihood computation (Comparison of data # and simulation results via PCE with candidate design) @@ -916,34 +937,56 @@ class aPCE: # Reject the poorly performed prior accepted = (Likelihoods/np.max(Likelihoods)) >= unif - X_Posterior = X_MC[accepted] + + + # Prior-based estimation of BME + logBME = np.log(np.nanmean(Likelihoods)) + + # Posterior-based expectation of likelihoods + postExpLikelihoods = np.mean(np.log(Likelihoods[accepted])) + + # Posterior-based expectation of prior densities + postExpPrior = np.mean(logPriorLikelihoods[accepted]) # Utility function Eq.2 in Ref. (2) # Posterior covariance matrix after observing data y # Kullback-Leibler Divergence (Sergey's paper) if var == 'DKL': - BME = np.log(np.nanmean(Likelihoods)) # TODO: Calculate the correction factor for BME # BMECorrFactor = self.BME_Corr_Weight(PCE_SparseBayes_can, ObservationData, sigma2Dict) # BME += BMECorrFactor - U_J_d = np.mean(np.log(Likelihoods[Likelihoods!=0])- BME) - #U_J_d = np.mean(np.log(Likelihoods[accepted])) - BME - #U_J_d = np.nanmean(Likelihoods*np.log(Likelihoods)/np.nansum(Likelihoods))- np.log(BME) - + + #U_J_d = np.mean(np.log(Likelihoods[Likelihoods!=0])- BME) + U_J_d = postExpLikelihoods - logBME + + # Marginal likelihood + elif var == 'BME': + + U_J_d = logBME + + # Entropy-based information gain + elif var == 'infEntropy': + logBME = np.log(np.nanmean(Likelihoods)) + infEntropy = logBME - postExpPrior - postExpLikelihoods + U_J_d = infEntropy * -1 # -1 for minimization + # D-Posterior-precision elif var == 'DPP': + X_MC = None + X_Posterior = X_MC[accepted] # covariance of the posterior parameters U_J_d = -np.log(np.linalg.det(np.cov(X_Posterior))) # A-Posterior-precision elif var == 'APP': + X_MC = None + X_Posterior = X_MC[accepted] # trace of the posterior parameters U_J_d = -np.log(np.trace(np.cov(X_Posterior))) else: print('The algorithm you requested has not been implemented yet!') - return -1 * U_J_d # -1 is for minimization instead of maximization #-------------------------------------------------------------------------------------------------------- @@ -1145,28 +1188,27 @@ class aPCE: scoreExploration = np.delete(scoreExploration, badSampleIdx, axis=0) - # Split the candidates in groups for multiprocessing - split_allCandidates = np.array_split(allCandidates, NrofCandGroups, axis=0) - args = [(ExploitMethod, Model, split_allCandidates[i], i, sigma2Dict , var) for i in range(NrofCandGroups)] - - # With Pool.starmap_async() - results = pool.starmap_async(self.Utility_runner, args).get() - - # Close the pool - pool.close() - - # Retrieve the results and append them - U_J_d = np.concatenate([results[NofE][1] for NofE in range(NrofCandGroups)]) - - # TODO: Get the expected value (mean) of the Utility score for each cell - if ExploreMethod == 'Voronoi': U_J_d = np.mean(U_J_d.reshape(-1, NCandidate), axis=1) - - # Normalize U_J_d - norm_U_J_d = U_J_d / np.nansum(U_J_d) - # ------- Calculate Total score 
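# Illustrative sketch (not part of the patch): a compact summary of the
# rejection-sampling estimators assembled above, i.e. the acceptance step,
# the prior-based log-BME, the posterior expectations of the log-likelihood
# and of the log prior density, and the utilities selected via var = 'DKL',
# 'BME' or 'infEntropy'. The inputs are assumed to be 1-D arrays over the
# Monte Carlo realizations; the uniform draws are generated here so the
# sketch is self-contained. Names are illustrative, not BayesValidRox API.
import numpy as np

def information_utilities(likelihoods, log_prior):
    # accept realization i with probability L_i / max(L)
    unif = np.random.uniform(0.0, 1.0, size=likelihoods.shape[0])
    accepted = (likelihoods / np.max(likelihoods)) >= unif

    log_bme = np.log(np.nanmean(likelihoods))                  # prior-based BME estimate
    post_exp_loglik = np.mean(np.log(likelihoods[accepted]))   # posterior expected log-likelihood
    post_exp_prior = np.mean(log_prior[accepted])              # posterior expected log prior density

    return {'DKL': post_exp_loglik - log_bme,                  # expected information gain
            'BME': log_bme,                                    # marginal likelihood
            'infEntropy': log_bme - post_exp_prior - post_exp_loglik}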
------- if TradeOffScheme == 'None': - totalScore = norm_U_J_d + # Split the candidates in groups for multiprocessing + split_allCandidates = np.array_split(allCandidates, NrofCandGroups, axis=0) + args = [(ExploitMethod, Model, split_allCandidates[i], i, sigma2Dict , var) for i in range(NrofCandGroups)] + + # With Pool.starmap_async() + results = pool.starmap_async(self.Utility_runner, args).get() + + # Close the pool + pool.close() + + # Retrieve the results and append them + U_J_d = np.concatenate([results[NofE][1] for NofE in range(NrofCandGroups)]) + + # TODO: Get the expected value (mean) of the Utility score for each cell + if ExploreMethod == 'Voronoi': U_J_d = np.mean(U_J_d.reshape(-1, NCandidate), axis=1) + + # Normalize U_J_d + totalScore = U_J_d / np.nansum(U_J_d) + elif TradeOffScheme == 'epsilon-decreasing': # epsilon-decreasing scheme # Start with more exploration and increase the influence of exploitation @@ -1179,7 +1221,31 @@ class aPCE: print("\nweightExploration={0:0.3f} weightExpoitation={1:0.3f}".format(weightExploration, (1 - weightExploration))) - totalScore = (1 - weightExploration)*norm_U_J_d + weightExploration*scoreExploration + if (1 - weightExploration) == 0: + totalScore = scoreExploration + else: + # Split the candidates in groups for multiprocessing + split_allCandidates = np.array_split(allCandidates, NrofCandGroups, axis=0) + args = [(ExploitMethod, Model, split_allCandidates[i], i, sigma2Dict , var) for i in range(NrofCandGroups)] + + # With Pool.starmap_async() + results = pool.starmap_async(self.Utility_runner, args).get() + + # Close the pool + pool.close() + + # Retrieve the results and append them + U_J_d = np.concatenate([results[NofE][1] for NofE in range(NrofCandGroups)]) + + # TODO: Get the expected value (mean) of the Utility score for each cell + if ExploreMethod == 'Voronoi': U_J_d = np.mean(U_J_d.reshape(-1, NCandidate), axis=1) + + # Normalize U_J_d + norm_U_J_d = U_J_d / np.nansum(U_J_d) + + # Total score + totalScore = (1 - weightExploration)*norm_U_J_d + weightExploration*scoreExploration + # temp: Plot dim = self.ExpDesign.X.shape[1] diff --git a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py index 075717757..27a2f432e 100644 --- a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py +++ b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunctionComplex_Test.py @@ -121,7 +121,7 @@ if __name__ == "__main__": # One-shot (normal) or Sequential Adaptive (sequential) Design MetaModelOpts.ExpDesign.Method = 'sequential' - NrofInitSamples = 50 + NrofInitSamples = 100 MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples # Sampling methods @@ -131,7 +131,7 @@ if __name__ == "__main__": # Sequential experimental design (needed only for sequential ExpDesign) MetaModelOpts.ExpDesign.NrofNewSample = 5 - MetaModelOpts.ExpDesign.MaxNSamples = 100 + MetaModelOpts.ExpDesign.MaxNSamples = 200 MetaModelOpts.ExpDesign.ModifiedLOOThreshold = 1e-6 # Defining the measurement error, if it's known a priori @@ -152,31 +152,31 @@ if __name__ == "__main__": # ------- Sequential Design configuration -------- # ------------------------------------------------ # 1) 'None' 2) 'epsilon-decreasing' - MetaModelOpts.ExpDesign.TradeOffScheme = 'None' + MetaModelOpts.ExpDesign.TradeOffScheme = 'epsilon-decreasing' #MetaModelOpts.ExpDesign.nReprications = 2 # -------- Exploration ------ #1)'Voronoi' 2)'random' 3)'latin_hypercube' 4)'dual annealing' - 
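# Illustrative sketch (not part of the patch): the total-score step of
# opt_SeqDesign in a minimal form. The exploitation scores U_J_d are
# normalized and blended with the exploration scores; a weight of 1 means
# pure exploration (the exploitation utilities are then skipped entirely,
# as in the branch above), a weight of 0 means pure exploitation, which is
# what the 'None' trade-off scheme amounts to. Names are illustrative.
import numpy as np

def total_score(u_j_d, score_exploration, weight_exploration):
    if weight_exploration >= 1.0:
        return score_exploration                     # pure exploration
    norm_u_j_d = u_j_d / np.nansum(u_j_d)            # normalize exploitation scores
    if weight_exploration <= 0.0:
        return norm_u_j_d                            # pure exploitation ('None' scheme)
    return (1.0 - weight_exploration) * norm_u_j_d + weight_exploration * score_exploration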
MetaModelOpts.ExpDesign.ExploreMethod = 'Voronoi' + MetaModelOpts.ExpDesign.ExploreMethod = 'random' # Use when 'dual annealing' chosen MetaModelOpts.ExpDesign.MaxFunItr = 200 # Use when 'Voronoi' or 'random' or 'latin_hypercube' chosen - MetaModelOpts.ExpDesign.NCandidate = 50 + MetaModelOpts.ExpDesign.NCandidate = 100 MetaModelOpts.ExpDesign.NrofCandGroups = 4 # -------- Exploitation ------ # 1)'BayesOptDesign' 2)'VarOptDesign' 3)'alphabetic' 4)'Space-filling' - MetaModelOpts.ExpDesign.ExploitMethod = 'VarOptDesign' + MetaModelOpts.ExpDesign.ExploitMethod = 'BayesOptDesign' # BayesOptDesign -> when data is available # 1)DKL (Kullback-Leibler Divergence) 2)DPP (D-Posterior-percision) # 3)APP (A-Posterior-percision) - #MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP'] + MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP'] # VarBasedOptDesign -> when data is not available # Only with Vornoi >>> 1)Entropy 2)EIGF, 3)ALM, 4)LOOCV - MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy'#['EIGF', 'Entropy', 'LOOCV'] + #MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy'#['EIGF', 'Entropy', 'LOOCV'] # alphabetic # 1)D-Opt (D-Optimality) 2)A-Opt (A-Optimality) diff --git a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py index edd9dd72b..32d231bac 100644 --- a/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py +++ b/BayesValidRox/tests/AnalyticalFunction/AnalyticalFunction_Test.py @@ -109,7 +109,7 @@ if __name__ == "__main__": MetaModelOpts.addExpDesign() # One-shot (normal) or Sequential Adaptive (sequential) Design - MetaModelOpts.ExpDesign.Method = 'normal' + MetaModelOpts.ExpDesign.Method = 'sequential' NrofInitSamples = 15 MetaModelOpts.ExpDesign.NrSamples = NrofInitSamples @@ -119,7 +119,7 @@ if __name__ == "__main__": MetaModelOpts.ExpDesign.SamplingMethod = 'halton' # Sequential experimental design (needed only for sequential ExpDesign) - MetaModelOpts.ExpDesign.NrofNewSample = 1 + MetaModelOpts.ExpDesign.NrofNewSample = 5 MetaModelOpts.ExpDesign.MaxNSamples = 50 MetaModelOpts.ExpDesign.ModifiedLOOThreshold = 1e-6 @@ -144,27 +144,29 @@ if __name__ == "__main__": #MetaModelOpts.ExpDesign.nReprications = 5 # -------- Exploration ------ #1)'Voronoi' 2)'random' 3)'latin_hypercube' 4)'dual annealing' - MetaModelOpts.ExpDesign.ExploreMethod = 'Voronoi' + MetaModelOpts.ExpDesign.ExploreMethod = 'random' # Use when 'dual annealing' chosen MetaModelOpts.ExpDesign.MaxFunItr = 200 # Use when 'Voronoi' or 'MC' or 'LHS' chosen - MetaModelOpts.ExpDesign.NCandidate = 10 + MetaModelOpts.ExpDesign.NCandidate = 200 MetaModelOpts.ExpDesign.NrofCandGroups = 4 # -------- Exploitation ------ # 1)'BayesOptDesign' 2)'VarOptDesign' 3)'alphabetic' 4)'Space-filling' - MetaModelOpts.ExpDesign.ExploitMethod = 'VarOptDesign' + MetaModelOpts.ExpDesign.ExploitMethod = 'BayesOptDesign' # BayesOptDesign -> when data is available # 1)DKL (Kullback-Leibler Divergence) 2)DPP (D-Posterior-percision) # 3)APP (A-Posterior-percision) - #MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP'] + # 1)DKL (Kullback-Leibler Divergence) 2)BME (Bayesian model evidence) + # 3)infEntropy (Entropy-based information gain) + MetaModelOpts.ExpDesign.UtilityFunction = 'DKL' #['DKL', 'DPP'] # VarBasedOptDesign -> when data is not available # Only with Vornoi >>> 1)Entropy 2)EIGF, 3)ALM, 4)LOOCV - MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy' #['EIGF', 'Entropy', 'LOOCV'] + 
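# Illustrative sketch (not part of the patch): with ExploitMethod set to
# 'BayesOptDesign' and NrofNewSample = 5 as configured above, the sequential
# step amounts to ranking the candidate points by their total score and
# running the model at the best ones. The helper below only illustrates that
# ranking; it is not part of the shown diff or of the BayesValidRox API.
import numpy as np

def select_new_samples(all_candidates, total_score, n_new=5):
    best = np.argsort(total_score)[::-1][:n_new]     # indices of the highest scores
    return all_candidates[best]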
#MetaModelOpts.ExpDesign.UtilityFunction = 'Entropy' #['EIGF', 'Entropy', 'LOOCV'] # alphabetic # 1)D-Opt (D-Optimality) 2)A-Opt (A-Optimality) -- GitLab
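The likelihood evaluation that feeds the rejection sampling in util_BayesianDesign (the "Likelihood computation" comparing the observation data with the Monte Carlo realizations of the candidate response) is not part of this diff. The sketch below shows one common way such a likelihood can be computed, assuming independent Gaussian measurement errors with the variances supplied via sigma2Dict; the function name and argument layout are assumptions for illustration, not the BayesValidRox API. In practice the computation is usually kept in log space until the end to avoid underflow when many observation points are involved.

import numpy as np

def gaussian_likelihoods(y_mc, observations, sigma2):
    """Gaussian likelihood of each MC realization of the outputs given the data."""
    n_mc = next(iter(y_mc.values())).shape[0]
    log_lik = np.zeros(n_mc)
    for key, samples in y_mc.items():
        samples = np.asarray(samples).reshape(n_mc, -1)   # (n_mc, n_obs)
        data = np.atleast_1d(observations[key])           # (n_obs,) observed values
        var = np.atleast_1d(sigma2[key])                  # (n_obs,) error variances
        resid = samples - data                            # broadcast over realizations
        log_lik += -0.5 * np.sum(resid ** 2 / var + np.log(2.0 * np.pi * var), axis=1)
    return np.exp(log_lik)

# usage sketch:
# likelihoods = gaussian_likelihoods(y_mc, {'Z': [1.1, 2.2]}, {'Z': [0.05, 0.05]})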