diff --git a/examples/principal_component_analysis/example_principalcomponentanalysis.py b/examples/principal_component_analysis/example_principalcomponentanalysis.py
index ff9cb628f6b5af3f570b4b10b979ceabba4ae4c8..69cb4aa8fa2846e4d20a2cdb16e749716f4b8097 100644
--- a/examples/principal_component_analysis/example_principalcomponentanalysis.py
+++ b/examples/principal_component_analysis/example_principalcomponentanalysis.py
@@ -109,7 +109,7 @@ if __name__ == "__main__":
     # Select if you want to preserve the spatial/temporal depencencies
     MetaModelOpts.dim_red_method = 'PCA'
     MetaModelOpts.var_pca_threshold = 99.999
-    MetaModelOpts.n_pca_components = 9#10#5#10
+    MetaModelOpts.n_pca_components = 10#9#10#5#10
     #MetaModelOpts.n_bootstrap_itrs = 2
 
     # Select your metamodel method
diff --git a/src/bayesvalidrox/surrogate_models/surrogate_models.py b/src/bayesvalidrox/surrogate_models/surrogate_models.py
index 629bdd7a5caab1eda13894d900b991d1b2fe0ede..1670c9439566cb7361ffb45ed2601e9300f815f1 100644
--- a/src/bayesvalidrox/surrogate_models/surrogate_models.py
+++ b/src/bayesvalidrox/surrogate_models/surrogate_models.py
@@ -336,7 +336,6 @@ class MetaModel:
         If set to true constraints will be applied during training. 
         In this case the training uses OLS. In this version the constraints 
         need to be set explicitly in this class.
-
     verbose : bool
         Prints summary of the regression results. Default is `False`.
 
@@ -540,7 +539,7 @@ class MetaModel:
         # --- Bootstrap sampling ---
         # Correct number of bootstrap if PCA transformation is required.
         if self.dim_red_method.lower() == 'pca' and self.n_bootstrap_itrs == 1:
-            self.n_bootstrap_itrs = 100
+            self.n_bootstrap_itrs = 1#00
 
         # Check if fast version (update coeffs with OLS) is selected.
         n_comp_dict = {}
@@ -1126,7 +1125,7 @@ class MetaModel:
         n_pca_components = self.n_pca_components
         
         # Switch to var_pca if n_pca_components is too large
-        if n_pca_components >= n_features:
+        if (n_pca_components is not None) and (n_pca_components > n_features):
             n_pca_components = None
             if self.verbose:
                 print('')
@@ -1161,8 +1160,14 @@ class MetaModel:
         #    print('-' * 50)
         #    print()
 
+        # Set the solver to 'auto' if no reduction is wanted
+        # Otherwise use 'arpack'
+        solver = 'auto'
+        if n_pca_components is not None and n_pca_components < n_features:
+            solver = 'arpack'
+            
         # Fit and transform with the selected number of components
-        pca = sklearnPCA(n_components=n_pca_components, svd_solver='arpack')
+        pca = sklearnPCA(n_components=n_pca_components, svd_solver=solver)
         scaled_target = pca.fit_transform(target)
 
         return pca, scaled_target, n_pca_components