diff --git a/src/bayesvalidrox/surrogate_models/gaussian_process_sklearn.py b/src/bayesvalidrox/surrogate_models/gaussian_process_sklearn.py
index abb7c20dae8cb99a3ada349cac4e185d92c9aef0..f1657829780e02686d065265c6aa8b0312012fb5 100644
--- a/src/bayesvalidrox/surrogate_models/gaussian_process_sklearn.py
+++ b/src/bayesvalidrox/surrogate_models/gaussian_process_sklearn.py
@@ -4,7 +4,6 @@
 """
 Implementation of metamodel as GPE, using the Scikit-Learn library
 """
-import copy
 import math
 import os
 import warnings
@@ -175,6 +174,10 @@ class GPESkl(MetaModel):
         and the self.isotropy variable. If True, it initializes isotropic kernels.
         ToDo: Add additional kernels
         ToDo: Add option to include user-defined kernel
+        Raises
+        ------
+        AttributeError: if an invalid type of Kernel is given
+        TypeError: if the kernel type is not a string
         Returns
         -------
         List: with the kernels to iterate over
@@ -215,9 +218,9 @@ class GPESkl(MetaModel):
             kernel_list = [kernel_dict[self._kernel_type]]
             kernel_names = [self._kernel_type]
         except:
-            print(f'The kernel option {self._kernel_type} is not available. An RBF kernel was chosen instead')
-            kernel_list = [kernel_dict['RBF']]
-            kernel_names = ['RBF']
+            if isinstance(self._kernel_type, str):
+                raise AttributeError(f'The kernel option {self._kernel_type} is not available.')
+            raise TypeError(f'The kernel option {self._kernel_type} is of an invalid type.')
 
         return kernel_list, kernel_names
 
@@ -232,7 +235,9 @@ class GPESkl(MetaModel):
             The parameter value combinations to train the model with.
         transform_type: str
             Transformation to apply to the input parameters. Default is None
-
+        Raises
+        ------
+        AttributeError: If an invalid scaling name is given.
         Returns
         -------
         np.array: (#samples, #dim)
@@ -252,10 +257,7 @@ class GPESkl(MetaModel):
            scaler = StandardScaler()
            X_S = scaler.fit_transform(X)
        else:
-            print(f'No scaler {transform_type} found. 
No scaling was done') - scaler = None - X_S = X - + raise AttributeError(f'No scaler {transform_type} found.') return X_S, scaler @_preprocessing_fit @@ -463,4 +465,3 @@ class GPESkl(MetaModel): std_pred[output] = std return mean_pred, std_pred - diff --git a/tests/test_GaussianProcessSklearn.py b/tests/test_GaussianProcessSklearn.py index db91128fa93e6e6b0d8c7699e4dda434d4900d91..1d0672f163147bb57f75a2fd8b119b02d923c7a6 100644 --- a/tests/test_GaussianProcessSklearn.py +++ b/tests/test_GaussianProcessSklearn.py @@ -17,6 +17,9 @@ import numpy as np import pytest import sys +from sklearn.preprocessing import MinMaxScaler, StandardScaler +from sklearn.gaussian_process import kernels + sys.path.append("../src/") from bayesvalidrox import GPESkl, Input @@ -59,7 +62,7 @@ def test_add_input_space(GPE) -> None: MetaModel = GPE MetaModel.add_InputSpace() -def test_fit() -> None: +def test_fit() -> None: """ Fit GPE """ @@ -107,39 +110,67 @@ def test_kernel_no_kertyp(GPE) -> None: MetaModel.build_kernels() assert MetaModel._kernel_type == 'RBF' -def test_kernel_wrong_kertyp(GPE) -> None: +def test_build_kernel_wrong_kername(GPE) -> None: """ - Test kernel with no kernel type + Test kernel with an invalid kernel name """ MetaModel = GPE - MetaModel.build_kernels() - MetaModel.kernel_type = 123 - assert MetaModel._kernel_type == 'RBF' + MetaModel._kernel_type = 'InvalidKernel' + + with pytest.raises(AttributeError, match="The kernel option InvalidKernel is not available."): + MetaModel.build_kernels() + +def test_build_kernel_wrong_kertyp(GPE) -> None: + """ + Test building kernels with an invalid variable type + """ + MetaModel = GPE + MetaModel._kernel_type = 123 + + with pytest.raises(TypeError, match="The kernel option 123 is of an invalid type."): + MetaModel.build_kernels() def test_kernel_type_rbf(GPE) -> None: """ Test kernel type """ MetaModel = GPE - MetaModel.kernel_type = 'rbf' - MetaModel.build_kernels() + MetaModel._kernel_type = 'RBF' + kernel_list, 
kernel_names = MetaModel.build_kernels() + + assert len(kernel_list) == 1, "Expected only one kernel when autoSelect is False" + assert kernel_names == ['RBF'], "Expected kernel name to be 'RBF'" def test_build_kernels_matern(GPE) -> None: """ Build kernels with Matern kernel type """ MetaModel = GPE - MetaModel.kernel_type = 'matern' - MetaModel.build_kernels() + MetaModel._kernel_type = 'Matern' + kernel_list, kernel_names = MetaModel.build_kernels() + + assert len(kernel_list) == 1, "Expected only one kernel when autoSelect is False" + assert kernel_names == ['Matern'], "Expected kernel name to be 'Matern'" def test_build_kernels_rq(GPE) -> None: """ Build kernels with Matern kernel type """ MetaModel = GPE - MetaModel.kernel_type = 'rq' - MetaModel.build_kernels() + MetaModel._kernel_type = 'RQ' + kernel_list, kernel_names = MetaModel.build_kernels() + assert len(kernel_list) == 1, "Expected only one kernel when autoSelect is False" + assert kernel_names == ['RQ'], "Expected kernel name to be 'RQ'" + +def test_auto_select_kernels(GPE) -> None: + """Build Kernels when autoSelect is True""" + MetaModel = GPE + MetaModel._autoSelect = True + kernel_list, kernel_names = MetaModel.build_kernels() + + assert len(kernel_list) == 3, "Expected three kernels when autoSelect is True" + assert sorted(kernel_names) == ['Matern', 'RBF', 'RQ'], "Expected kernel names to be 'Matern', 'RBF', 'RQ'" def test_build_kernels_with_length_scale(GPE) -> None: """ @@ -157,14 +188,39 @@ def test_build_kernels_with_bounds(GPE) -> None: MetaModel.kernel_bounds = (1e-2, 1e1) MetaModel.build_kernels() - def test_kernel_isotropy(GPE): """ Test kernel isotropy """ MetaModel = GPE MetaModel.kernel_isotropy = True - MetaModel.build_kernels() + kernels_list, kernel_names = MetaModel.build_kernels() + + assert len(kernels_list) == 1, "Expected only one kernel when autoSelect is False" + assert isinstance(kernels_list[ + 0].k2.length_scale, int), f"Expected 1 length scales for isotropic kernel, but 
got a list of them" + +def test_anisotropic_kernel(GPE) -> None: + """Build anisotropic kernels for a 2d case""" + ndim = 2 + Inputs = Input() + for i in range(ndim): + Inputs.add_marginals() + Inputs.Marginals[i].dist_type = 'uniform' + Inputs.Marginals[i].parameters = [0, 1] + samples = np.array([[0.2, 0.5], [0.8, 0.7]]) + MetaModel = GPESkl(Inputs) + MetaModel.CollocationPoints = samples + MetaModel.InputSpace = InputSpace(MetaModel.input_obj, MetaModel.meta_model_type) + MetaModel._kernel_isotropy = False + MetaModel._kernel_type = 'RBF' + + kernels_list, kernel_names = MetaModel.build_kernels() + + assert len(kernels_list) == 1, "Expected only one kernel when autoSelect is False" + assert kernel_names == ['RBF'], "Expected kernel name to be 'RBF'" + assert len(kernels_list[ + 0].k2.length_scale) == ndim, f"Expected {ndim} length scales for anisotropic kernel, but got {len(kernels_list[0].k2.length_scale)}" def test_adaptive_regression(GPE) -> None: """ @@ -190,6 +246,81 @@ def test_adaptive_regression_verbose(GPE) -> None: MetaModel.adaptive_regression(samples, outputs, varIdx=None, verbose=True) + +# # Added: +def test_transform_scale_x_norm(GPE) -> None: + """ + Test normalization using 'norm' (MinMaxScaler), for both the transform_x() and scale_x() functions. 
+ """ + MetaModel = GPE + X = np.array([[1, 2], [3, 4], [5, 6]]) + X_transformed, scaler = MetaModel.transform_x(X, transform_type='norm') # Call directly on class + + expected_scaler = MinMaxScaler() + expected_X_transformed = expected_scaler.fit_transform(X) + + assert np.allclose(X_transformed, expected_X_transformed), "Normalization failed" + assert isinstance(scaler, MinMaxScaler), "Scaler object is not MinMaxScaler" + + MetaModel._x_scaler = scaler + X_scaled = MetaModel.scale_x(X, MetaModel._x_scaler) + assert np.allclose(X_scaled, expected_X_transformed), "Scaling after normalization failed" + + +def test_transform_scale_x_standard(GPE) -> None: + """Test standardization using 'standard' from the Scikit-Learn library, for both the transformation and the + scaling functions""" + X = np.array([[1, 2], [3, 4], [5, 6]]) + MetaModel = GPE + X_transformed, scaler = MetaModel.transform_x(X, transform_type='standard') + + expected_scaler = StandardScaler() + expected_X_transformed = expected_scaler.fit_transform(X) + + assert np.allclose(X_transformed, expected_X_transformed), "Standardization failed" + assert isinstance(scaler, StandardScaler), "Scaler object is not StandardScaler" + + MetaModel._x_scaler = scaler + X_scaled = MetaModel.scale_x(X, MetaModel._x_scaler) + assert np.allclose(X_scaled, expected_X_transformed), "Scaling after standardization failed" + + +def test_transform_x_none(GPE) -> None: + """Test the no transformation case when transform_type is None""" + X = np.array([[1, 2], [3, 4], [5, 6]]) + MetaModel = GPE + X_transformed, scaler = MetaModel.transform_x(X, transform_type=None) + + assert np.array_equal(X_transformed, X), "No transformation failed" + assert scaler is None, "Scaler object should be None" + + # Test scale_x(scaler=None) + MetaModel._x_scaler = scaler + X_scaled = MetaModel.scale_x(X, MetaModel._x_scaler) + assert np.allclose(X_scaled, X), "Scaling after standardization failed" + + +def test_transform_x_invalid_type(GPE) -> 
None:
+    """Test transformation with an invalid transformation type"""
+    X = np.array([[1, 2], [3, 4], [5, 6]])
+    MetaModel = GPE
+
+    # Expecting an AttributeError when an invalid transformation type is provided
+    with pytest.raises(AttributeError, match="No scaler invalid found."):
+        MetaModel.transform_x(X, transform_type='invalid')
+
+    # NOTE(review): removed leftover debug statement `stop = 1` (line kept to preserve hunk count)
+
+
+def test_scale_x_none(GPE) -> None:
+    X = np.array([[1, 2], [3, 4], [5, 6]])
+    MetaModel = GPE
+    _, scaler = MetaModel.transform_x(X, transform_type=None)
+    MetaModel._x_scaler = scaler
+
+    X_transformed = MetaModel.scale_x(X, scaler)
+    assert np.array_equal(X_transformed, X), "No transformation failed"
+
 #TO Do:
 def test_scale_x(GPE):
     """