From 8d231a833f425deb7c9146ed0347cbca35bdb970 Mon Sep 17 00:00:00 2001 From: Lars Reimann Date: Sat, 30 Mar 2024 16:58:02 +0100 Subject: [PATCH 1/4] feat: add suffixes to models to indicate their task --- .../ml/classical/classification/__init__.py | 28 +++++------ .../ml/classical/classification/_ada_boost.py | 8 ++-- .../classification/_decision_tree.py | 8 ++-- .../classification/_gradient_boosting.py | 8 ++-- .../classification/_k_nearest_neighbors.py | 8 ++-- .../classification/_logistic_regression.py | 8 ++-- .../classification/_random_forest.py | 8 ++-- .../classification/_support_vector_machine.py | 16 +++---- .../ml/classical/regression/__init__.py | 40 ++++++++-------- .../ml/classical/regression/_ada_boost.py | 8 ++-- .../ml/classical/regression/_decision_tree.py | 8 ++-- .../regression/_elastic_net_regression.py | 8 ++-- .../regression/_gradient_boosting.py | 8 ++-- .../regression/_k_nearest_neighbors.py | 8 ++-- .../classical/regression/_lasso_regression.py | 8 ++-- .../regression/_linear_regression.py | 8 ++-- .../ml/classical/regression/_random_forest.py | 8 ++-- .../classical/regression/_ridge_regression.py | 8 ++-- .../regression/_support_vector_machine.py | 16 +++---- src/safeds/ml/nn/__init__.py | 6 +-- src/safeds/ml/nn/_model.py | 4 +- .../classification/test_ada_boost.py | 22 ++++----- .../classification/test_classifier.py | 28 +++++------ .../classification/test_gradient_boosting.py | 14 +++--- .../test_k_nearest_neighbors.py | 10 ++-- .../classification/test_random_forest.py | 8 ++-- .../test_support_vector_machine.py | 48 +++++++++---------- .../ml/classical/regression/test_ada_boost.py | 22 ++++----- .../regression/test_elastic_net_regression.py | 20 ++++---- .../regression/test_gradient_boosting.py | 14 +++--- .../regression/test_k_nearest_neighbors.py | 10 ++-- .../regression/test_lasso_regression.py | 10 ++-- .../regression/test_random_forest.py | 8 ++-- .../ml/classical/regression/test_regressor.py | 40 ++++++++-------- 
.../regression/test_ridge_regression.py | 10 ++-- .../regression/test_support_vector_machine.py | 48 +++++++++---------- .../safeds/ml/classical/test_util_sklearn.py | 4 +- tests/safeds/ml/nn/test_model.py | 42 ++++++++-------- 38 files changed, 294 insertions(+), 294 deletions(-) diff --git a/src/safeds/ml/classical/classification/__init__.py b/src/safeds/ml/classical/classification/__init__.py index 7871a7bde..5b333a83e 100644 --- a/src/safeds/ml/classical/classification/__init__.py +++ b/src/safeds/ml/classical/classification/__init__.py @@ -1,21 +1,21 @@ """Classes for classification tasks.""" -from ._ada_boost import AdaBoost +from ._ada_boost import AdaBoostClassifier from ._classifier import Classifier -from ._decision_tree import DecisionTree -from ._gradient_boosting import GradientBoosting -from ._k_nearest_neighbors import KNearestNeighbors -from ._logistic_regression import LogisticRegression -from ._random_forest import RandomForest -from ._support_vector_machine import SupportVectorMachine +from ._decision_tree import DecisionTreeClassifier +from ._gradient_boosting import GradientBoostingClassifier +from ._k_nearest_neighbors import KNearestNeighborsClassifier +from ._logistic_regression import LogisticRegressionClassifier +from ._random_forest import RandomForestClassifier +from ._support_vector_machine import SupportVectorMachineClassifier __all__ = [ - "AdaBoost", + "AdaBoostClassifier", "Classifier", - "DecisionTree", - "GradientBoosting", - "KNearestNeighbors", - "LogisticRegression", - "RandomForest", - "SupportVectorMachine", + "DecisionTreeClassifier", + "GradientBoostingClassifier", + "KNearestNeighborsClassifier", + "LogisticRegressionClassifier", + "RandomForestClassifier", + "SupportVectorMachineClassifier", ] diff --git a/src/safeds/ml/classical/classification/_ada_boost.py b/src/safeds/ml/classical/classification/_ada_boost.py index 6000b4ff6..8cb6c28d0 100644 --- a/src/safeds/ml/classical/classification/_ada_boost.py +++ 
b/src/safeds/ml/classical/classification/_ada_boost.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class AdaBoost(Classifier): +class AdaBoostClassifier(Classifier): """ Ada Boost classification. @@ -99,7 +99,7 @@ def learning_rate(self) -> float: """ return self._learning_rate - def fit(self, training_set: TaggedTable) -> AdaBoost: + def fit(self, training_set: TaggedTable) -> AdaBoostClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -112,7 +112,7 @@ def fit(self, training_set: TaggedTable) -> AdaBoost: Returns ------- - fitted_classifier : AdaBoost + fitted_classifier : AdaBoostClassifier The fitted classifier. Raises @@ -131,7 +131,7 @@ def fit(self, training_set: TaggedTable) -> AdaBoost: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = AdaBoost( + result = AdaBoostClassifier( learner=self.learner, maximum_number_of_learners=self.maximum_number_of_learners, learning_rate=self._learning_rate, diff --git a/src/safeds/ml/classical/classification/_decision_tree.py b/src/safeds/ml/classical/classification/_decision_tree.py index 5b183e8b4..78b7e1559 100644 --- a/src/safeds/ml/classical/classification/_decision_tree.py +++ b/src/safeds/ml/classical/classification/_decision_tree.py @@ -14,7 +14,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class DecisionTree(Classifier): +class DecisionTreeClassifier(Classifier): """Decision tree classification.""" def __init__(self) -> None: @@ -23,7 +23,7 @@ def __init__(self) -> None: self._feature_names: list[str] | None = None self._target_name: str | None = None - def fit(self, training_set: TaggedTable) -> DecisionTree: + def fit(self, training_set: TaggedTable) -> DecisionTreeClassifier: """ Create a copy of this classifier and fit it with the given training data. 
@@ -36,7 +36,7 @@ def fit(self, training_set: TaggedTable) -> DecisionTree: Returns ------- - fitted_classifier : DecisionTree + fitted_classifier : DecisionTreeClassifier The fitted classifier. Raises @@ -55,7 +55,7 @@ def fit(self, training_set: TaggedTable) -> DecisionTree: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = DecisionTree() + result = DecisionTreeClassifier() result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/classification/_gradient_boosting.py b/src/safeds/ml/classical/classification/_gradient_boosting.py index 593d1c632..3f22b08ff 100644 --- a/src/safeds/ml/classical/classification/_gradient_boosting.py +++ b/src/safeds/ml/classical/classification/_gradient_boosting.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class GradientBoosting(Classifier): +class GradientBoostingClassifier(Classifier): """ Gradient boosting classification. @@ -74,7 +74,7 @@ def learning_rate(self) -> float: """ return self._learning_rate - def fit(self, training_set: TaggedTable) -> GradientBoosting: + def fit(self, training_set: TaggedTable) -> GradientBoostingClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -87,7 +87,7 @@ def fit(self, training_set: TaggedTable) -> GradientBoosting: Returns ------- - fitted_classifier : GradientBoosting + fitted_classifier : GradientBoostingClassifier The fitted classifier. 
Raises @@ -106,7 +106,7 @@ def fit(self, training_set: TaggedTable) -> GradientBoosting: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = GradientBoosting(number_of_trees=self._number_of_trees, learning_rate=self._learning_rate) + result = GradientBoostingClassifier(number_of_trees=self._number_of_trees, learning_rate=self._learning_rate) result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/classification/_k_nearest_neighbors.py b/src/safeds/ml/classical/classification/_k_nearest_neighbors.py index 7340af888..8f67ed95b 100644 --- a/src/safeds/ml/classical/classification/_k_nearest_neighbors.py +++ b/src/safeds/ml/classical/classification/_k_nearest_neighbors.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class KNearestNeighbors(Classifier): +class KNearestNeighborsClassifier(Classifier): """ K-nearest-neighbors classification. @@ -56,7 +56,7 @@ def number_of_neighbors(self) -> int: """ return self._number_of_neighbors - def fit(self, training_set: TaggedTable) -> KNearestNeighbors: + def fit(self, training_set: TaggedTable) -> KNearestNeighborsClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -69,7 +69,7 @@ def fit(self, training_set: TaggedTable) -> KNearestNeighbors: Returns ------- - fitted_classifier : KNearestNeighbors + fitted_classifier : KNearestNeighborsClassifier The fitted classifier. 
Raises @@ -99,7 +99,7 @@ def fit(self, training_set: TaggedTable) -> KNearestNeighbors: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = KNearestNeighbors(self._number_of_neighbors) + result = KNearestNeighborsClassifier(self._number_of_neighbors) result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/classification/_logistic_regression.py b/src/safeds/ml/classical/classification/_logistic_regression.py index b2a5cbeba..67630396f 100644 --- a/src/safeds/ml/classical/classification/_logistic_regression.py +++ b/src/safeds/ml/classical/classification/_logistic_regression.py @@ -14,7 +14,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class LogisticRegression(Classifier): +class LogisticRegressionClassifier(Classifier): """Regularized logistic regression.""" def __init__(self) -> None: @@ -23,7 +23,7 @@ def __init__(self) -> None: self._feature_names: list[str] | None = None self._target_name: str | None = None - def fit(self, training_set: TaggedTable) -> LogisticRegression: + def fit(self, training_set: TaggedTable) -> LogisticRegressionClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -36,7 +36,7 @@ def fit(self, training_set: TaggedTable) -> LogisticRegression: Returns ------- - fitted_classifier : LogisticRegression + fitted_classifier : LogisticRegressionClassifier The fitted classifier. 
Raises @@ -55,7 +55,7 @@ def fit(self, training_set: TaggedTable) -> LogisticRegression: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = LogisticRegression() + result = LogisticRegressionClassifier() result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/classification/_random_forest.py b/src/safeds/ml/classical/classification/_random_forest.py index 04e573d5a..c237fd5a7 100644 --- a/src/safeds/ml/classical/classification/_random_forest.py +++ b/src/safeds/ml/classical/classification/_random_forest.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class RandomForest(Classifier): +class RandomForestClassifier(Classifier): """Random forest classification. Parameters @@ -54,7 +54,7 @@ def number_of_trees(self) -> int: """ return self._number_of_trees - def fit(self, training_set: TaggedTable) -> RandomForest: + def fit(self, training_set: TaggedTable) -> RandomForestClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -67,7 +67,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest: Returns ------- - fitted_classifier : RandomForest + fitted_classifier : RandomForestClassifier The fitted classifier. 
Raises @@ -86,7 +86,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = RandomForest(number_of_trees=self._number_of_trees) + result = RandomForestClassifier(number_of_trees=self._number_of_trees) result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/classification/_support_vector_machine.py b/src/safeds/ml/classical/classification/_support_vector_machine.py index 33e01188e..34bac1bdf 100644 --- a/src/safeds/ml/classical/classification/_support_vector_machine.py +++ b/src/safeds/ml/classical/classification/_support_vector_machine.py @@ -30,7 +30,7 @@ def _get_sklearn_kernel(self) -> object: """ -class SupportVectorMachine(Classifier): +class SupportVectorMachineClassifier(Classifier): """ Support vector machine. @@ -151,18 +151,18 @@ def _get_kernel_name(self) -> str: TypeError If the kernel type is invalid. 
""" - if isinstance(self.kernel, SupportVectorMachine.Kernel.Linear): + if isinstance(self.kernel, SupportVectorMachineClassifier.Kernel.Linear): return "linear" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.Polynomial): + elif isinstance(self.kernel, SupportVectorMachineClassifier.Kernel.Polynomial): return "poly" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.Sigmoid): + elif isinstance(self.kernel, SupportVectorMachineClassifier.Kernel.Sigmoid): return "sigmoid" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.RadialBasisFunction): + elif isinstance(self.kernel, SupportVectorMachineClassifier.Kernel.RadialBasisFunction): return "rbf" else: raise TypeError("Invalid kernel type.") - def fit(self, training_set: TaggedTable) -> SupportVectorMachine: + def fit(self, training_set: TaggedTable) -> SupportVectorMachineClassifier: """ Create a copy of this classifier and fit it with the given training data. @@ -175,7 +175,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine: Returns ------- - fitted_classifier : SupportVectorMachine + fitted_classifier : SupportVectorMachineClassifier The fitted classifier. 
Raises @@ -194,7 +194,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine: wrapped_classifier = self._get_sklearn_classifier() fit(wrapped_classifier, training_set) - result = SupportVectorMachine(c=self._c, kernel=self._kernel) + result = SupportVectorMachineClassifier(c=self._c, kernel=self._kernel) result._wrapped_classifier = wrapped_classifier result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/__init__.py b/src/safeds/ml/classical/regression/__init__.py index 9d45d16e6..7c042454f 100644 --- a/src/safeds/ml/classical/regression/__init__.py +++ b/src/safeds/ml/classical/regression/__init__.py @@ -1,27 +1,27 @@ """Models for regression tasks.""" -from ._ada_boost import AdaBoost -from ._decision_tree import DecisionTree -from ._elastic_net_regression import ElasticNetRegression -from ._gradient_boosting import GradientBoosting -from ._k_nearest_neighbors import KNearestNeighbors -from ._lasso_regression import LassoRegression -from ._linear_regression import LinearRegression -from ._random_forest import RandomForest +from ._ada_boost import AdaBoostRegressor +from ._decision_tree import DecisionTreeRegressor +from ._elastic_net_regression import ElasticNetRegressor +from ._gradient_boosting import GradientBoostingRegressor +from ._k_nearest_neighbors import KNearestNeighborsRegressor +from ._lasso_regression import LassoRegressor +from ._linear_regression import LinearRegressionRegressor +from ._random_forest import RandomForestRegressor from ._regressor import Regressor -from ._ridge_regression import RidgeRegression -from ._support_vector_machine import SupportVectorMachine +from ._ridge_regression import RidgeRegressor +from ._support_vector_machine import SupportVectorMachineRegressor __all__ = [ - "AdaBoost", - "DecisionTree", - "ElasticNetRegression", - "GradientBoosting", - "KNearestNeighbors", - "LassoRegression", - "LinearRegression", 
- "RandomForest", + "AdaBoostRegressor", + "DecisionTreeRegressor", + "ElasticNetRegressor", + "GradientBoostingRegressor", + "KNearestNeighborsRegressor", + "LassoRegressor", + "LinearRegressionRegressor", + "RandomForestRegressor", "Regressor", - "RidgeRegression", - "SupportVectorMachine", + "RidgeRegressor", + "SupportVectorMachineRegressor", ] diff --git a/src/safeds/ml/classical/regression/_ada_boost.py b/src/safeds/ml/classical/regression/_ada_boost.py index cb9abb757..ad037e96f 100644 --- a/src/safeds/ml/classical/regression/_ada_boost.py +++ b/src/safeds/ml/classical/regression/_ada_boost.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class AdaBoost(Regressor): +class AdaBoostRegressor(Regressor): """ Ada Boost regression. @@ -99,7 +99,7 @@ def learning_rate(self) -> float: """ return self._learning_rate - def fit(self, training_set: TaggedTable) -> AdaBoost: + def fit(self, training_set: TaggedTable) -> AdaBoostRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -112,7 +112,7 @@ def fit(self, training_set: TaggedTable) -> AdaBoost: Returns ------- - fitted_regressor : AdaBoost + fitted_regressor : AdaBoostRegressor The fitted regressor. 
Raises @@ -131,7 +131,7 @@ def fit(self, training_set: TaggedTable) -> AdaBoost: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = AdaBoost( + result = AdaBoostRegressor( learner=self._learner, maximum_number_of_learners=self._maximum_number_of_learners, learning_rate=self._learning_rate, diff --git a/src/safeds/ml/classical/regression/_decision_tree.py b/src/safeds/ml/classical/regression/_decision_tree.py index 91beb47fb..48bb5a95f 100644 --- a/src/safeds/ml/classical/regression/_decision_tree.py +++ b/src/safeds/ml/classical/regression/_decision_tree.py @@ -14,7 +14,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class DecisionTree(Regressor): +class DecisionTreeRegressor(Regressor): """Decision tree regression.""" def __init__(self) -> None: @@ -23,7 +23,7 @@ def __init__(self) -> None: self._feature_names: list[str] | None = None self._target_name: str | None = None - def fit(self, training_set: TaggedTable) -> DecisionTree: + def fit(self, training_set: TaggedTable) -> DecisionTreeRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -36,7 +36,7 @@ def fit(self, training_set: TaggedTable) -> DecisionTree: Returns ------- - fitted_regressor : DecisionTree + fitted_regressor : DecisionTreeRegressor The fitted regressor. 
Raises @@ -55,7 +55,7 @@ def fit(self, training_set: TaggedTable) -> DecisionTree: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = DecisionTree() + result = DecisionTreeRegressor() result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_elastic_net_regression.py b/src/safeds/ml/classical/regression/_elastic_net_regression.py index 00a67d495..c7087529f 100644 --- a/src/safeds/ml/classical/regression/_elastic_net_regression.py +++ b/src/safeds/ml/classical/regression/_elastic_net_regression.py @@ -17,7 +17,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class ElasticNetRegression(Regressor): +class ElasticNetRegressor(Regressor): """Elastic net regression. Parameters @@ -104,7 +104,7 @@ def lasso_ratio(self) -> float: """ return self._lasso_ratio - def fit(self, training_set: TaggedTable) -> ElasticNetRegression: + def fit(self, training_set: TaggedTable) -> ElasticNetRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -117,7 +117,7 @@ def fit(self, training_set: TaggedTable) -> ElasticNetRegression: Returns ------- - fitted_regressor : ElasticNetRegression + fitted_regressor : ElasticNetRegressor The fitted regressor. 
Raises @@ -136,7 +136,7 @@ def fit(self, training_set: TaggedTable) -> ElasticNetRegression: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = ElasticNetRegression(alpha=self._alpha, lasso_ratio=self._lasso_ratio) + result = ElasticNetRegressor(alpha=self._alpha, lasso_ratio=self._lasso_ratio) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_gradient_boosting.py b/src/safeds/ml/classical/regression/_gradient_boosting.py index 5d5f3b6ed..fd47d5a7c 100644 --- a/src/safeds/ml/classical/regression/_gradient_boosting.py +++ b/src/safeds/ml/classical/regression/_gradient_boosting.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class GradientBoosting(Regressor): +class GradientBoostingRegressor(Regressor): """ Gradient boosting regression. @@ -74,7 +74,7 @@ def learning_rate(self) -> float: """ return self._learning_rate - def fit(self, training_set: TaggedTable) -> GradientBoosting: + def fit(self, training_set: TaggedTable) -> GradientBoostingRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -87,7 +87,7 @@ def fit(self, training_set: TaggedTable) -> GradientBoosting: Returns ------- - fitted_regressor : GradientBoosting + fitted_regressor : GradientBoostingRegressor The fitted regressor. 
Raises @@ -106,7 +106,7 @@ def fit(self, training_set: TaggedTable) -> GradientBoosting: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = GradientBoosting(number_of_trees=self._number_of_trees, learning_rate=self._learning_rate) + result = GradientBoostingRegressor(number_of_trees=self._number_of_trees, learning_rate=self._learning_rate) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_k_nearest_neighbors.py b/src/safeds/ml/classical/regression/_k_nearest_neighbors.py index fac6dad14..d2b8040cf 100644 --- a/src/safeds/ml/classical/regression/_k_nearest_neighbors.py +++ b/src/safeds/ml/classical/regression/_k_nearest_neighbors.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class KNearestNeighbors(Regressor): +class KNearestNeighborsRegressor(Regressor): """ K-nearest-neighbors regression. @@ -56,7 +56,7 @@ def number_of_neighbors(self) -> int: """ return self._number_of_neighbors - def fit(self, training_set: TaggedTable) -> KNearestNeighbors: + def fit(self, training_set: TaggedTable) -> KNearestNeighborsRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -69,7 +69,7 @@ def fit(self, training_set: TaggedTable) -> KNearestNeighbors: Returns ------- - fitted_regressor : KNearestNeighbors + fitted_regressor : KNearestNeighborsRegressor The fitted regressor. 
Raises @@ -100,7 +100,7 @@ def fit(self, training_set: TaggedTable) -> KNearestNeighbors: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = KNearestNeighbors(self._number_of_neighbors) + result = KNearestNeighborsRegressor(self._number_of_neighbors) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_lasso_regression.py b/src/safeds/ml/classical/regression/_lasso_regression.py index 1826ae9b6..6971f183d 100644 --- a/src/safeds/ml/classical/regression/_lasso_regression.py +++ b/src/safeds/ml/classical/regression/_lasso_regression.py @@ -16,7 +16,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class LassoRegression(Regressor): +class LassoRegressor(Regressor): """Lasso regression. Parameters @@ -64,7 +64,7 @@ def alpha(self) -> float: """ return self._alpha - def fit(self, training_set: TaggedTable) -> LassoRegression: + def fit(self, training_set: TaggedTable) -> LassoRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -77,7 +77,7 @@ def fit(self, training_set: TaggedTable) -> LassoRegression: Returns ------- - fitted_regressor : LassoRegression + fitted_regressor : LassoRegressor The fitted regressor. 
Raises @@ -96,7 +96,7 @@ def fit(self, training_set: TaggedTable) -> LassoRegression: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = LassoRegression(alpha=self._alpha) + result = LassoRegressor(alpha=self._alpha) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_linear_regression.py b/src/safeds/ml/classical/regression/_linear_regression.py index 1c60a7114..a3bbb381e 100644 --- a/src/safeds/ml/classical/regression/_linear_regression.py +++ b/src/safeds/ml/classical/regression/_linear_regression.py @@ -14,7 +14,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class LinearRegression(Regressor): +class LinearRegressionRegressor(Regressor): """Linear regression.""" def __init__(self) -> None: @@ -23,7 +23,7 @@ def __init__(self) -> None: self._feature_names: list[str] | None = None self._target_name: str | None = None - def fit(self, training_set: TaggedTable) -> LinearRegression: + def fit(self, training_set: TaggedTable) -> LinearRegressionRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -36,7 +36,7 @@ def fit(self, training_set: TaggedTable) -> LinearRegression: Returns ------- - fitted_regressor : LinearRegression + fitted_regressor : LinearRegressionRegressor The fitted regressor. 
Raises @@ -55,7 +55,7 @@ def fit(self, training_set: TaggedTable) -> LinearRegression: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = LinearRegression() + result = LinearRegressionRegressor() result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_random_forest.py b/src/safeds/ml/classical/regression/_random_forest.py index 8dd27941d..0709ef92e 100644 --- a/src/safeds/ml/classical/regression/_random_forest.py +++ b/src/safeds/ml/classical/regression/_random_forest.py @@ -15,7 +15,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class RandomForest(Regressor): +class RandomForestRegressor(Regressor): """Random forest regression. Parameters @@ -54,7 +54,7 @@ def number_of_trees(self) -> int: """ return self._number_of_trees - def fit(self, training_set: TaggedTable) -> RandomForest: + def fit(self, training_set: TaggedTable) -> RandomForestRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -67,7 +67,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest: Returns ------- - fitted_regressor : RandomForest + fitted_regressor : RandomForestRegressor The fitted regressor. 
Raises @@ -86,7 +86,7 @@ def fit(self, training_set: TaggedTable) -> RandomForest: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = RandomForest(number_of_trees=self._number_of_trees) + result = RandomForestRegressor(number_of_trees=self._number_of_trees) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_ridge_regression.py b/src/safeds/ml/classical/regression/_ridge_regression.py index 128551bf3..d267da9c8 100644 --- a/src/safeds/ml/classical/regression/_ridge_regression.py +++ b/src/safeds/ml/classical/regression/_ridge_regression.py @@ -16,7 +16,7 @@ from safeds.data.tabular.containers import Table, TaggedTable -class RidgeRegression(Regressor): +class RidgeRegressor(Regressor): """ Ridge regression. @@ -65,7 +65,7 @@ def alpha(self) -> float: """ return self._alpha - def fit(self, training_set: TaggedTable) -> RidgeRegression: + def fit(self, training_set: TaggedTable) -> RidgeRegressor: """ Create a copy of this regressor and fit it with the given training data. @@ -78,7 +78,7 @@ def fit(self, training_set: TaggedTable) -> RidgeRegression: Returns ------- - fitted_regressor : RidgeRegression + fitted_regressor : RidgeRegressor The fitted regressor. 
Raises @@ -97,7 +97,7 @@ def fit(self, training_set: TaggedTable) -> RidgeRegression: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = RidgeRegression(alpha=self._alpha) + result = RidgeRegressor(alpha=self._alpha) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/classical/regression/_support_vector_machine.py b/src/safeds/ml/classical/regression/_support_vector_machine.py index 7635a7ceb..711d544d7 100644 --- a/src/safeds/ml/classical/regression/_support_vector_machine.py +++ b/src/safeds/ml/classical/regression/_support_vector_machine.py @@ -30,7 +30,7 @@ def _get_sklearn_kernel(self) -> object: """ -class SupportVectorMachine(Regressor): +class SupportVectorMachineRegressor(Regressor): """ Support vector machine. @@ -151,18 +151,18 @@ def _get_kernel_name(self) -> str: TypeError If the kernel type is invalid. """ - if isinstance(self.kernel, SupportVectorMachine.Kernel.Linear): + if isinstance(self.kernel, SupportVectorMachineRegressor.Kernel.Linear): return "linear" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.Polynomial): + elif isinstance(self.kernel, SupportVectorMachineRegressor.Kernel.Polynomial): return "poly" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.Sigmoid): + elif isinstance(self.kernel, SupportVectorMachineRegressor.Kernel.Sigmoid): return "sigmoid" - elif isinstance(self.kernel, SupportVectorMachine.Kernel.RadialBasisFunction): + elif isinstance(self.kernel, SupportVectorMachineRegressor.Kernel.RadialBasisFunction): return "rbf" else: raise TypeError("Invalid kernel type.") - def fit(self, training_set: TaggedTable) -> SupportVectorMachine: + def fit(self, training_set: TaggedTable) -> SupportVectorMachineRegressor: """ Create a copy of this regressor and fit it with the given training data. 
@@ -175,7 +175,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine: Returns ------- - fitted_regressor : SupportVectorMachine + fitted_regressor : SupportVectorMachineRegressor The fitted regressor. Raises @@ -194,7 +194,7 @@ def fit(self, training_set: TaggedTable) -> SupportVectorMachine: wrapped_regressor = self._get_sklearn_regressor() fit(wrapped_regressor, training_set) - result = SupportVectorMachine(c=self._c, kernel=self._kernel) + result = SupportVectorMachineRegressor(c=self._c, kernel=self._kernel) result._wrapped_regressor = wrapped_regressor result._feature_names = training_set.features.column_names result._target_name = training_set.target.name diff --git a/src/safeds/ml/nn/__init__.py b/src/safeds/ml/nn/__init__.py index e001f06f2..53b1f98d4 100644 --- a/src/safeds/ml/nn/__init__.py +++ b/src/safeds/ml/nn/__init__.py @@ -1,10 +1,10 @@ """Classes for classification tasks.""" from ._fnn_layer import FNNLayer -from ._model import ClassificationNeuralNetwork, RegressionNeuralNetwork +from ._model import NeuralNetworkClassifier, NeuralNetworkRegressor __all__ = [ "FNNLayer", - "ClassificationNeuralNetwork", - "RegressionNeuralNetwork", + "NeuralNetworkClassifier", + "NeuralNetworkRegressor", ] diff --git a/src/safeds/ml/nn/_model.py b/src/safeds/ml/nn/_model.py index 20b064f68..2eaece27f 100644 --- a/src/safeds/ml/nn/_model.py +++ b/src/safeds/ml/nn/_model.py @@ -10,7 +10,7 @@ from safeds.ml.nn._fnn_layer import FNNLayer -class RegressionNeuralNetwork: +class NeuralNetworkRegressor: def __init__(self, layers: list): self._model = _PytorchModel(layers, is_for_classification=False) self._batch_size = 1 @@ -133,7 +133,7 @@ def is_fitted(self) -> bool: return self._is_fitted -class ClassificationNeuralNetwork: +class NeuralNetworkClassifier: def __init__(self, layers: list[FNNLayer]): self._model = _PytorchModel(layers, is_for_classification=True) self._batch_size = 1 diff --git a/tests/safeds/ml/classical/classification/test_ada_boost.py 
b/tests/safeds/ml/classical/classification/test_ada_boost.py index 4ed3a339b..4ea80dc27 100644 --- a/tests/safeds/ml/classical/classification/test_ada_boost.py +++ b/tests/safeds/ml/classical/classification/test_ada_boost.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.classification import AdaBoost +from safeds.ml.classical.classification import AdaBoostClassifier @pytest.fixture() @@ -12,24 +12,24 @@ def training_set() -> TaggedTable: class TestLearner: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - learner = AdaBoost() - fitted_model = AdaBoost(learner=learner).fit(training_set) + learner = AdaBoostClassifier() + fitted_model = AdaBoostClassifier(learner=learner).fit(training_set) assert fitted_model.learner == learner def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - learner = AdaBoost() - fitted_model = AdaBoost(learner=learner).fit(training_set) + learner = AdaBoostClassifier() + fitted_model = AdaBoostClassifier(learner=learner).fit(training_set) assert fitted_model._wrapped_classifier is not None assert isinstance(fitted_model._wrapped_classifier.estimator, type(learner._get_sklearn_classifier())) class TestMaximumNumberOfLearners: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(maximum_number_of_learners=2).fit(training_set) + fitted_model = AdaBoostClassifier(maximum_number_of_learners=2).fit(training_set) assert fitted_model.maximum_number_of_learners == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(maximum_number_of_learners=2).fit(training_set) + fitted_model = AdaBoostClassifier(maximum_number_of_learners=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.n_estimators == 2 @@ -39,16 
+39,16 @@ def test_should_raise_if_less_than_or_equal_to_0(self, maximum_number_of_learner OutOfBoundsError, match=rf"maximum_number_of_learners \(={maximum_number_of_learners}\) is not inside \[1, \u221e\)\.", ): - AdaBoost(maximum_number_of_learners=maximum_number_of_learners) + AdaBoostClassifier(maximum_number_of_learners=maximum_number_of_learners) class TestLearningRate: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(learning_rate=2).fit(training_set) + fitted_model = AdaBoostClassifier(learning_rate=2).fit(training_set) assert fitted_model.learning_rate == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(learning_rate=2).fit(training_set) + fitted_model = AdaBoostClassifier(learning_rate=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.learning_rate == 2 @@ -58,4 +58,4 @@ def test_should_raise_if_less_than_or_equal_to_0(self, learning_rate: float) -> OutOfBoundsError, match=rf"learning_rate \(={learning_rate}\) is not inside \(0, \u221e\)\.", ): - AdaBoost(learning_rate=learning_rate) + AdaBoostClassifier(learning_rate=learning_rate) diff --git a/tests/safeds/ml/classical/classification/test_classifier.py b/tests/safeds/ml/classical/classification/test_classifier.py index 2338544a0..979264535 100644 --- a/tests/safeds/ml/classical/classification/test_classifier.py +++ b/tests/safeds/ml/classical/classification/test_classifier.py @@ -15,14 +15,14 @@ UntaggedTableError, ) from safeds.ml.classical.classification import ( - AdaBoost, + AdaBoostClassifier, Classifier, - DecisionTree, - GradientBoosting, - KNearestNeighbors, - LogisticRegression, - RandomForest, - SupportVectorMachine, + DecisionTreeClassifier, + GradientBoostingClassifier, + KNearestNeighborsClassifier, + LogisticRegressionClassifier, + RandomForestClassifier, + SupportVectorMachineClassifier, ) if TYPE_CHECKING: 
@@ -43,13 +43,13 @@ def classifiers() -> list[Classifier]: The list of classifiers to test. """ return [ - AdaBoost(), - DecisionTree(), - GradientBoosting(), - KNearestNeighbors(2), - LogisticRegression(), - RandomForest(), - SupportVectorMachine(), + AdaBoostClassifier(), + DecisionTreeClassifier(), + GradientBoostingClassifier(), + KNearestNeighborsClassifier(2), + LogisticRegressionClassifier(), + RandomForestClassifier(), + SupportVectorMachineClassifier(), ] diff --git a/tests/safeds/ml/classical/classification/test_gradient_boosting.py b/tests/safeds/ml/classical/classification/test_gradient_boosting.py index 34fbd2b0b..d96532049 100644 --- a/tests/safeds/ml/classical/classification/test_gradient_boosting.py +++ b/tests/safeds/ml/classical/classification/test_gradient_boosting.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.classification import GradientBoosting +from safeds.ml.classical.classification import GradientBoostingClassifier @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfTrees: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(number_of_trees=2).fit(training_set) + fitted_model = GradientBoostingClassifier(number_of_trees=2).fit(training_set) assert fitted_model.number_of_trees == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(number_of_trees=2).fit(training_set) + fitted_model = GradientBoostingClassifier(number_of_trees=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.n_estimators == 2 @@ -26,16 +26,16 @@ def test_should_raise_if_less_than_1(self, number_of_trees: int) -> None: OutOfBoundsError, match=rf"number_of_trees \(={number_of_trees}\) is not inside \[1, \u221e\)\.", ): - 
GradientBoosting(number_of_trees=number_of_trees) + GradientBoostingClassifier(number_of_trees=number_of_trees) class TestLearningRate: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(learning_rate=2).fit(training_set) + fitted_model = GradientBoostingClassifier(learning_rate=2).fit(training_set) assert fitted_model.learning_rate == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(learning_rate=2).fit(training_set) + fitted_model = GradientBoostingClassifier(learning_rate=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.learning_rate == 2 @@ -45,4 +45,4 @@ def test_should_raise_if_less_than_or_equal_to_0(self, learning_rate: float) -> OutOfBoundsError, match=rf"learning_rate \(={learning_rate}\) is not inside \(0, \u221e\)\.", ): - GradientBoosting(learning_rate=learning_rate) + GradientBoostingClassifier(learning_rate=learning_rate) diff --git a/tests/safeds/ml/classical/classification/test_k_nearest_neighbors.py b/tests/safeds/ml/classical/classification/test_k_nearest_neighbors.py index 9677d9541..5df786901 100644 --- a/tests/safeds/ml/classical/classification/test_k_nearest_neighbors.py +++ b/tests/safeds/ml/classical/classification/test_k_nearest_neighbors.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.classification import KNearestNeighbors +from safeds.ml.classical.classification import KNearestNeighborsClassifier @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfNeighbors: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = KNearestNeighbors(number_of_neighbors=2).fit(training_set) + fitted_model = 
KNearestNeighborsClassifier(number_of_neighbors=2).fit(training_set) assert fitted_model.number_of_neighbors == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = KNearestNeighbors(number_of_neighbors=2).fit(training_set) + fitted_model = KNearestNeighborsClassifier(number_of_neighbors=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.n_neighbors == 2 @@ -26,8 +26,8 @@ def test_should_raise_if_less_than_or_equal_to_0(self, number_of_neighbors: int) OutOfBoundsError, match=rf"number_of_neighbors \(={number_of_neighbors}\) is not inside \[1, \u221e\)\.", ): - KNearestNeighbors(number_of_neighbors=number_of_neighbors) + KNearestNeighborsClassifier(number_of_neighbors=number_of_neighbors) def test_should_raise_if_greater_than_sample_size(self, training_set: TaggedTable) -> None: with pytest.raises(ValueError, match="has to be less than or equal to the sample size"): - KNearestNeighbors(number_of_neighbors=5).fit(training_set) + KNearestNeighborsClassifier(number_of_neighbors=5).fit(training_set) diff --git a/tests/safeds/ml/classical/classification/test_random_forest.py b/tests/safeds/ml/classical/classification/test_random_forest.py index 794a242b9..46fb17ad2 100644 --- a/tests/safeds/ml/classical/classification/test_random_forest.py +++ b/tests/safeds/ml/classical/classification/test_random_forest.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.classification import RandomForest +from safeds.ml.classical.classification import RandomForestClassifier @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfTrees: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = RandomForest(number_of_trees=2).fit(training_set) + fitted_model = 
RandomForestClassifier(number_of_trees=2).fit(training_set) assert fitted_model.number_of_trees == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = RandomForest(number_of_trees=2).fit(training_set) + fitted_model = RandomForestClassifier(number_of_trees=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert fitted_model._wrapped_classifier.n_estimators == 2 @@ -26,4 +26,4 @@ def test_should_raise_if_less_than_or_equal_to_0(self, number_of_trees: int) -> OutOfBoundsError, match=rf"number_of_trees \(={number_of_trees}\) is not inside \[1, \u221e\)\.", ): - RandomForest(number_of_trees=number_of_trees) + RandomForestClassifier(number_of_trees=number_of_trees) diff --git a/tests/safeds/ml/classical/classification/test_support_vector_machine.py b/tests/safeds/ml/classical/classification/test_support_vector_machine.py index 5cbb69445..bb54b7d6d 100644 --- a/tests/safeds/ml/classical/classification/test_support_vector_machine.py +++ b/tests/safeds/ml/classical/classification/test_support_vector_machine.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.classification import SupportVectorMachine +from safeds.ml.classical.classification import SupportVectorMachineClassifier @pytest.fixture() @@ -12,75 +12,75 @@ def training_set() -> TaggedTable: class TestC: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = SupportVectorMachine(c=2).fit(training_set=training_set) + fitted_model = SupportVectorMachineClassifier(c=2).fit(training_set=training_set) assert fitted_model.c == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = SupportVectorMachine(c=2).fit(training_set) + fitted_model = SupportVectorMachineClassifier(c=2).fit(training_set) assert fitted_model._wrapped_classifier is not None assert 
fitted_model._wrapped_classifier.C == 2 @pytest.mark.parametrize("c", [-1.0, 0.0], ids=["minus_one", "zero"]) def test_should_raise_if_less_than_or_equal_to_0(self, c: float) -> None: with pytest.raises(OutOfBoundsError, match=rf"c \(={c}\) is not inside \(0, \u221e\)\."): - SupportVectorMachine(c=c) + SupportVectorMachineClassifier(c=c) class TestKernel: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - kernel = SupportVectorMachine.Kernel.Linear() - fitted_model = SupportVectorMachine(c=2, kernel=kernel).fit(training_set=training_set) - assert isinstance(fitted_model.kernel, SupportVectorMachine.Kernel.Linear) + kernel = SupportVectorMachineClassifier.Kernel.Linear() + fitted_model = SupportVectorMachineClassifier(c=2, kernel=kernel).fit(training_set=training_set) + assert isinstance(fitted_model.kernel, SupportVectorMachineClassifier.Kernel.Linear) def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - kernel = SupportVectorMachine.Kernel.Linear() - fitted_model = SupportVectorMachine(c=2, kernel=kernel).fit(training_set) + kernel = SupportVectorMachineClassifier.Kernel.Linear() + fitted_model = SupportVectorMachineClassifier(c=2, kernel=kernel).fit(training_set) assert fitted_model._wrapped_classifier is not None - assert isinstance(fitted_model.kernel, SupportVectorMachine.Kernel.Linear) + assert isinstance(fitted_model.kernel, SupportVectorMachineClassifier.Kernel.Linear) def test_should_get_sklearn_kernel_linear(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Linear()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Linear) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Linear()) + assert isinstance(svm.kernel, SupportVectorMachineClassifier.Kernel.Linear) linear_kernel = svm.kernel._get_sklearn_kernel() assert linear_kernel == "linear" @pytest.mark.parametrize("degree", [-1, 0], ids=["minus_one", "zero"]) 
def test_should_raise_if_degree_less_than_1(self, degree: int) -> None: with pytest.raises(OutOfBoundsError, match=rf"degree \(={degree}\) is not inside \[1, \u221e\)\."): - SupportVectorMachine.Kernel.Polynomial(degree=degree) + SupportVectorMachineClassifier.Kernel.Polynomial(degree=degree) def test_should_get_sklearn_kernel_polynomial(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Polynomial(degree=2)) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Polynomial) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Polynomial(degree=2)) + assert isinstance(svm.kernel, SupportVectorMachineClassifier.Kernel.Polynomial) poly_kernel = svm.kernel._get_sklearn_kernel() assert poly_kernel == "poly" def test_should_get_sklearn_kernel_sigmoid(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Sigmoid()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Sigmoid) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Sigmoid()) + assert isinstance(svm.kernel, SupportVectorMachineClassifier.Kernel.Sigmoid) sigmoid_kernel = svm.kernel._get_sklearn_kernel() assert sigmoid_kernel == "sigmoid" def test_should_get_sklearn_kernel_rbf(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.RadialBasisFunction()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.RadialBasisFunction) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.RadialBasisFunction()) + assert isinstance(svm.kernel, SupportVectorMachineClassifier.Kernel.RadialBasisFunction) rbf_kernel = svm.kernel._get_sklearn_kernel() assert rbf_kernel == "rbf" def test_should_get_kernel_name(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Linear()) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Linear()) assert 
svm._get_kernel_name() == "linear" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Polynomial(degree=2)) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Polynomial(degree=2)) assert svm._get_kernel_name() == "poly" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Sigmoid()) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.Sigmoid()) assert svm._get_kernel_name() == "sigmoid" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.RadialBasisFunction()) + svm = SupportVectorMachineClassifier(c=2, kernel=SupportVectorMachineClassifier.Kernel.RadialBasisFunction()) assert svm._get_kernel_name() == "rbf" def test_should_get_kernel_name_invalid_kernel_type(self) -> None: - svm = SupportVectorMachine(c=2) + svm = SupportVectorMachineClassifier(c=2) with pytest.raises(TypeError, match="Invalid kernel type."): svm._get_kernel_name() diff --git a/tests/safeds/ml/classical/regression/test_ada_boost.py b/tests/safeds/ml/classical/regression/test_ada_boost.py index 52baec913..d85e6a691 100644 --- a/tests/safeds/ml/classical/regression/test_ada_boost.py +++ b/tests/safeds/ml/classical/regression/test_ada_boost.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import AdaBoost +from safeds.ml.classical.regression import AdaBoostRegressor @pytest.fixture() @@ -12,24 +12,24 @@ def training_set() -> TaggedTable: class TestLearner: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - learner = AdaBoost() - fitted_model = AdaBoost(learner=learner).fit(training_set) + learner = AdaBoostRegressor() + fitted_model = AdaBoostRegressor(learner=learner).fit(training_set) assert fitted_model.learner == learner def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - learner = 
AdaBoost() - fitted_model = AdaBoost(learner=learner).fit(training_set) + learner = AdaBoostRegressor() + fitted_model = AdaBoostRegressor(learner=learner).fit(training_set) assert fitted_model._wrapped_regressor is not None assert isinstance(fitted_model._wrapped_regressor.estimator, type(learner._get_sklearn_regressor())) class TestMaximumNumberOfLearners: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(maximum_number_of_learners=2).fit(training_set) + fitted_model = AdaBoostRegressor(maximum_number_of_learners=2).fit(training_set) assert fitted_model.maximum_number_of_learners == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(maximum_number_of_learners=2).fit(training_set) + fitted_model = AdaBoostRegressor(maximum_number_of_learners=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.n_estimators == 2 @@ -39,16 +39,16 @@ def test_should_raise_if_less_than_or_equal_to_0(self, maximum_number_of_learner OutOfBoundsError, match=rf"maximum_number_of_learners \(={maximum_number_of_learners}\) is not inside \[1, \u221e\)\.", ): - AdaBoost(maximum_number_of_learners=maximum_number_of_learners) + AdaBoostRegressor(maximum_number_of_learners=maximum_number_of_learners) class TestLearningRate: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(learning_rate=2).fit(training_set) + fitted_model = AdaBoostRegressor(learning_rate=2).fit(training_set) assert fitted_model.learning_rate == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = AdaBoost(learning_rate=2).fit(training_set) + fitted_model = AdaBoostRegressor(learning_rate=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.learning_rate == 2 @@ -58,4 +58,4 @@ def 
test_should_raise_if_less_than_or_equal_to_0(self, learning_rate: float) -> OutOfBoundsError, match=rf"learning_rate \(={learning_rate}\) is not inside \(0, \u221e\)\.", ): - AdaBoost(learning_rate=learning_rate) + AdaBoostRegressor(learning_rate=learning_rate) diff --git a/tests/safeds/ml/classical/regression/test_elastic_net_regression.py b/tests/safeds/ml/classical/regression/test_elastic_net_regression.py index b225c9c78..0df5a19fc 100644 --- a/tests/safeds/ml/classical/regression/test_elastic_net_regression.py +++ b/tests/safeds/ml/classical/regression/test_elastic_net_regression.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import ElasticNetRegression +from safeds.ml.classical.regression import ElasticNetRegressor @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestAlpha: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = ElasticNetRegression(alpha=1).fit(training_set) + fitted_model = ElasticNetRegressor(alpha=1).fit(training_set) assert fitted_model.alpha == 1 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = ElasticNetRegression(alpha=1).fit(training_set) + fitted_model = ElasticNetRegressor(alpha=1).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.alpha == 1 @@ -26,7 +26,7 @@ def test_should_raise_if_less_than_0(self, alpha: float) -> None: OutOfBoundsError, match=rf"alpha \(={alpha}\) is not inside \[0, \u221e\)\.", ): - ElasticNetRegression(alpha=alpha) + ElasticNetRegressor(alpha=alpha) def test_should_warn_if_equal_to_0(self) -> None: with pytest.warns( @@ -36,16 +36,16 @@ def test_should_warn_if_equal_to_0(self) -> None: "should use LinearRegression instead for better numerical stability." 
), ): - ElasticNetRegression(alpha=0) + ElasticNetRegressor(alpha=0) class TestLassoRatio: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = ElasticNetRegression(lasso_ratio=0.3).fit(training_set) + fitted_model = ElasticNetRegressor(lasso_ratio=0.3).fit(training_set) assert fitted_model.lasso_ratio == 0.3 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = ElasticNetRegression(lasso_ratio=0.3).fit(training_set) + fitted_model = ElasticNetRegressor(lasso_ratio=0.3).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.l1_ratio == 0.3 @@ -55,7 +55,7 @@ def test_should_raise_if_not_between_0_and_1(self, lasso_ratio: float) -> None: OutOfBoundsError, match=rf"lasso_ratio \(={lasso_ratio}\) is not inside \[0, 1\]\.", ): - ElasticNetRegression(lasso_ratio=lasso_ratio) + ElasticNetRegressor(lasso_ratio=lasso_ratio) def test_should_warn_if_0(self) -> None: with pytest.warns( @@ -65,7 +65,7 @@ def test_should_warn_if_0(self) -> None: " Use RidgeRegression instead for better numerical stability." ), ): - ElasticNetRegression(lasso_ratio=0) + ElasticNetRegressor(lasso_ratio=0) def test_should_warn_if_1(self) -> None: with pytest.warns( @@ -75,4 +75,4 @@ def test_should_warn_if_1(self) -> None: " Use LassoRegression instead for better numerical stability." 
), ): - ElasticNetRegression(lasso_ratio=1) + ElasticNetRegressor(lasso_ratio=1) diff --git a/tests/safeds/ml/classical/regression/test_gradient_boosting.py b/tests/safeds/ml/classical/regression/test_gradient_boosting.py index 0243551ae..d6e7b014c 100644 --- a/tests/safeds/ml/classical/regression/test_gradient_boosting.py +++ b/tests/safeds/ml/classical/regression/test_gradient_boosting.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import GradientBoosting +from safeds.ml.classical.regression import GradientBoostingRegressor @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfTrees: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(number_of_trees=2).fit(training_set) + fitted_model = GradientBoostingRegressor(number_of_trees=2).fit(training_set) assert fitted_model.number_of_trees == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(number_of_trees=2).fit(training_set) + fitted_model = GradientBoostingRegressor(number_of_trees=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.n_estimators == 2 @@ -26,16 +26,16 @@ def test_should_raise_if_less_than_1(self, number_of_trees: int) -> None: OutOfBoundsError, match=rf"number_of_trees \(={number_of_trees}\) is not inside \[1, \u221e\)\.", ): - GradientBoosting(number_of_trees=number_of_trees) + GradientBoostingRegressor(number_of_trees=number_of_trees) class TestLearningRate: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(learning_rate=2).fit(training_set) + fitted_model = GradientBoostingRegressor(learning_rate=2).fit(training_set) assert fitted_model.learning_rate == 2 def 
test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = GradientBoosting(learning_rate=2).fit(training_set) + fitted_model = GradientBoostingRegressor(learning_rate=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.learning_rate == 2 @@ -45,4 +45,4 @@ def test_should_raise_if_less_than_or_equal_to_0(self, learning_rate: float) -> OutOfBoundsError, match=rf"learning_rate \(={learning_rate}\) is not inside \(0, \u221e\)\.", ): - GradientBoosting(learning_rate=learning_rate) + GradientBoostingRegressor(learning_rate=learning_rate) diff --git a/tests/safeds/ml/classical/regression/test_k_nearest_neighbors.py b/tests/safeds/ml/classical/regression/test_k_nearest_neighbors.py index 69bb27cb4..09394f863 100644 --- a/tests/safeds/ml/classical/regression/test_k_nearest_neighbors.py +++ b/tests/safeds/ml/classical/regression/test_k_nearest_neighbors.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import KNearestNeighbors +from safeds.ml.classical.regression import KNearestNeighborsRegressor @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfNeighbors: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = KNearestNeighbors(number_of_neighbors=2).fit(training_set) + fitted_model = KNearestNeighborsRegressor(number_of_neighbors=2).fit(training_set) assert fitted_model.number_of_neighbors == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = KNearestNeighbors(number_of_neighbors=2).fit(training_set) + fitted_model = KNearestNeighborsRegressor(number_of_neighbors=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.n_neighbors == 2 @@ -26,8 +26,8 @@ def 
test_should_raise_if_less_than_or_equal_to_0(self, number_of_neighbors: int) OutOfBoundsError, match=rf"number_of_neighbors \(={number_of_neighbors}\) is not inside \[1, \u221e\)\.", ): - KNearestNeighbors(number_of_neighbors=number_of_neighbors) + KNearestNeighborsRegressor(number_of_neighbors=number_of_neighbors) def test_should_raise_if_greater_than_sample_size(self, training_set: TaggedTable) -> None: with pytest.raises(ValueError, match="has to be less than or equal to the sample size"): - KNearestNeighbors(number_of_neighbors=5).fit(training_set) + KNearestNeighborsRegressor(number_of_neighbors=5).fit(training_set) diff --git a/tests/safeds/ml/classical/regression/test_lasso_regression.py b/tests/safeds/ml/classical/regression/test_lasso_regression.py index 6705aa948..fc550d6f5 100644 --- a/tests/safeds/ml/classical/regression/test_lasso_regression.py +++ b/tests/safeds/ml/classical/regression/test_lasso_regression.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import LassoRegression +from safeds.ml.classical.regression import LassoRegressor @pytest.fixture() @@ -12,18 +12,18 @@ def training_set() -> TaggedTable: class TestAlpha: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = LassoRegression(alpha=1).fit(training_set) + fitted_model = LassoRegressor(alpha=1).fit(training_set) assert fitted_model.alpha == 1 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = LassoRegression(alpha=1).fit(training_set) + fitted_model = LassoRegressor(alpha=1).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.alpha == 1 @pytest.mark.parametrize("alpha", [-0.5], ids=["minus_zero_point_5"]) def test_should_raise_if_less_than_0(self, alpha: float) -> None: with pytest.raises(OutOfBoundsError, 
match=rf"alpha \(={alpha}\) is not inside \[0, \u221e\)\."): - LassoRegression(alpha=alpha) + LassoRegressor(alpha=alpha) def test_should_warn_if_equal_to_0(self) -> None: with pytest.warns( @@ -33,4 +33,4 @@ def test_should_warn_if_equal_to_0(self) -> None: "should use LinearRegression instead for better numerical stability." ), ): - LassoRegression(alpha=0) + LassoRegressor(alpha=0) diff --git a/tests/safeds/ml/classical/regression/test_random_forest.py b/tests/safeds/ml/classical/regression/test_random_forest.py index 409f7fca3..02e4416cb 100644 --- a/tests/safeds/ml/classical/regression/test_random_forest.py +++ b/tests/safeds/ml/classical/regression/test_random_forest.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import RandomForest +from safeds.ml.classical.regression import RandomForestRegressor @pytest.fixture() @@ -12,11 +12,11 @@ def training_set() -> TaggedTable: class TestNumberOfTrees: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = RandomForest(number_of_trees=2).fit(training_set) + fitted_model = RandomForestRegressor(number_of_trees=2).fit(training_set) assert fitted_model.number_of_trees == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = RandomForest(number_of_trees=2).fit(training_set) + fitted_model = RandomForestRegressor(number_of_trees=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.n_estimators == 2 @@ -26,4 +26,4 @@ def test_should_raise_if_less_than_or_equal_to_0(self, number_of_trees: int) -> OutOfBoundsError, match=rf"number_of_trees \(={number_of_trees}\) is not inside \[1, \u221e\)\.", ): - RandomForest(number_of_trees=number_of_trees) + RandomForestRegressor(number_of_trees=number_of_trees) diff --git 
a/tests/safeds/ml/classical/regression/test_regressor.py b/tests/safeds/ml/classical/regression/test_regressor.py index c1d648018..a4b43b9b1 100644 --- a/tests/safeds/ml/classical/regression/test_regressor.py +++ b/tests/safeds/ml/classical/regression/test_regressor.py @@ -17,17 +17,17 @@ UntaggedTableError, ) from safeds.ml.classical.regression import ( - AdaBoost, - DecisionTree, - ElasticNetRegression, - GradientBoosting, - KNearestNeighbors, - LassoRegression, - LinearRegression, - RandomForest, + AdaBoostRegressor, + DecisionTreeRegressor, + ElasticNetRegressor, + GradientBoostingRegressor, + KNearestNeighborsRegressor, + LassoRegressor, + LinearRegressionRegressor, + RandomForestRegressor, Regressor, - RidgeRegression, - SupportVectorMachine, + RidgeRegressor, + SupportVectorMachineRegressor, ) # noinspection PyProtectedMember @@ -51,16 +51,16 @@ def regressors() -> list[Regressor]: The list of regressors to test. """ return [ - AdaBoost(), - DecisionTree(), - ElasticNetRegression(), - GradientBoosting(), - KNearestNeighbors(2), - LassoRegression(), - LinearRegression(), - RandomForest(), - RidgeRegression(), - SupportVectorMachine(), + AdaBoostRegressor(), + DecisionTreeRegressor(), + ElasticNetRegressor(), + GradientBoostingRegressor(), + KNearestNeighborsRegressor(2), + LassoRegressor(), + LinearRegressionRegressor(), + RandomForestRegressor(), + RidgeRegressor(), + SupportVectorMachineRegressor(), ] diff --git a/tests/safeds/ml/classical/regression/test_ridge_regression.py b/tests/safeds/ml/classical/regression/test_ridge_regression.py index 954fb6bbf..fb5a36771 100644 --- a/tests/safeds/ml/classical/regression/test_ridge_regression.py +++ b/tests/safeds/ml/classical/regression/test_ridge_regression.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import RidgeRegression +from safeds.ml.classical.regression import RidgeRegressor 
@pytest.fixture() @@ -12,18 +12,18 @@ def training_set() -> TaggedTable: class TestAlpha: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = RidgeRegression(alpha=1).fit(training_set) + fitted_model = RidgeRegressor(alpha=1).fit(training_set) assert fitted_model.alpha == 1 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = RidgeRegression(alpha=1).fit(training_set) + fitted_model = RidgeRegressor(alpha=1).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.alpha == 1 @pytest.mark.parametrize("alpha", [-0.5], ids=["minus_zero_point_5"]) def test_should_raise_if_less_than_0(self, alpha: float) -> None: with pytest.raises(OutOfBoundsError, match=rf"alpha \(={alpha}\) is not inside \[0, \u221e\)\."): - RidgeRegression(alpha=alpha) + RidgeRegressor(alpha=alpha) def test_should_warn_if_equal_to_0(self) -> None: with pytest.warns( @@ -33,4 +33,4 @@ def test_should_warn_if_equal_to_0(self) -> None: "should use LinearRegression instead for better numerical stability." 
), ): - RidgeRegression(alpha=0) + RidgeRegressor(alpha=0) diff --git a/tests/safeds/ml/classical/regression/test_support_vector_machine.py b/tests/safeds/ml/classical/regression/test_support_vector_machine.py index 853cf71d1..54495d0ae 100644 --- a/tests/safeds/ml/classical/regression/test_support_vector_machine.py +++ b/tests/safeds/ml/classical/regression/test_support_vector_machine.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import OutOfBoundsError -from safeds.ml.classical.regression import SupportVectorMachine +from safeds.ml.classical.regression import SupportVectorMachineRegressor @pytest.fixture() @@ -12,75 +12,75 @@ def training_set() -> TaggedTable: class TestC: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - fitted_model = SupportVectorMachine(c=2).fit(training_set=training_set) + fitted_model = SupportVectorMachineRegressor(c=2).fit(training_set=training_set) assert fitted_model.c == 2 def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - fitted_model = SupportVectorMachine(c=2).fit(training_set) + fitted_model = SupportVectorMachineRegressor(c=2).fit(training_set) assert fitted_model._wrapped_regressor is not None assert fitted_model._wrapped_regressor.C == 2 @pytest.mark.parametrize("c", [-1.0, 0.0], ids=["minus_one", "zero"]) def test_should_raise_if_less_than_or_equal_to_0(self, c: float) -> None: with pytest.raises(OutOfBoundsError, match=rf"c \(={c}\) is not inside \(0, \u221e\)\."): - SupportVectorMachine(c=c) + SupportVectorMachineRegressor(c=c) class TestKernel: def test_should_be_passed_to_fitted_model(self, training_set: TaggedTable) -> None: - kernel = SupportVectorMachine.Kernel.Linear() - fitted_model = SupportVectorMachine(c=2, kernel=kernel).fit(training_set=training_set) - assert isinstance(fitted_model.kernel, SupportVectorMachine.Kernel.Linear) + kernel = 
SupportVectorMachineRegressor.Kernel.Linear() + fitted_model = SupportVectorMachineRegressor(c=2, kernel=kernel).fit(training_set=training_set) + assert isinstance(fitted_model.kernel, SupportVectorMachineRegressor.Kernel.Linear) def test_should_be_passed_to_sklearn(self, training_set: TaggedTable) -> None: - kernel = SupportVectorMachine.Kernel.Linear() - fitted_model = SupportVectorMachine(c=2, kernel=kernel).fit(training_set) + kernel = SupportVectorMachineRegressor.Kernel.Linear() + fitted_model = SupportVectorMachineRegressor(c=2, kernel=kernel).fit(training_set) assert fitted_model._wrapped_regressor is not None - assert isinstance(fitted_model.kernel, SupportVectorMachine.Kernel.Linear) + assert isinstance(fitted_model.kernel, SupportVectorMachineRegressor.Kernel.Linear) def test_should_get_sklearn_kernel_linear(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Linear()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Linear) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Linear()) + assert isinstance(svm.kernel, SupportVectorMachineRegressor.Kernel.Linear) linear_kernel = svm.kernel._get_sklearn_kernel() assert linear_kernel == "linear" @pytest.mark.parametrize("degree", [-1, 0], ids=["minus_one", "zero"]) def test_should_raise_if_degree_less_than_1(self, degree: int) -> None: with pytest.raises(OutOfBoundsError, match=rf"degree \(={degree}\) is not inside \[1, \u221e\)\."): - SupportVectorMachine.Kernel.Polynomial(degree=degree) + SupportVectorMachineRegressor.Kernel.Polynomial(degree=degree) def test_should_get_sklearn_kernel_polynomial(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Polynomial(degree=2)) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Polynomial) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Polynomial(degree=2)) + assert isinstance(svm.kernel, 
SupportVectorMachineRegressor.Kernel.Polynomial) poly_kernel = svm.kernel._get_sklearn_kernel() assert poly_kernel == "poly" def test_should_get_sklearn_kernel_sigmoid(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Sigmoid()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.Sigmoid) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Sigmoid()) + assert isinstance(svm.kernel, SupportVectorMachineRegressor.Kernel.Sigmoid) sigmoid_kernel = svm.kernel._get_sklearn_kernel() assert sigmoid_kernel == "sigmoid" def test_should_get_sklearn_kernel_rbf(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.RadialBasisFunction()) - assert isinstance(svm.kernel, SupportVectorMachine.Kernel.RadialBasisFunction) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.RadialBasisFunction()) + assert isinstance(svm.kernel, SupportVectorMachineRegressor.Kernel.RadialBasisFunction) rbf_kernel = svm.kernel._get_sklearn_kernel() assert rbf_kernel == "rbf" def test_should_get_kernel_name(self) -> None: - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Linear()) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Linear()) assert svm._get_kernel_name() == "linear" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Polynomial(degree=2)) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Polynomial(degree=2)) assert svm._get_kernel_name() == "poly" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.Sigmoid()) + svm = SupportVectorMachineRegressor(c=2, kernel=SupportVectorMachineRegressor.Kernel.Sigmoid()) assert svm._get_kernel_name() == "sigmoid" - svm = SupportVectorMachine(c=2, kernel=SupportVectorMachine.Kernel.RadialBasisFunction()) + svm = SupportVectorMachineRegressor(c=2, 
kernel=SupportVectorMachineRegressor.Kernel.RadialBasisFunction()) assert svm._get_kernel_name() == "rbf" def test_should_get_kernel_name_invalid_kernel_type(self) -> None: - svm = SupportVectorMachine(c=2) + svm = SupportVectorMachineRegressor(c=2) with pytest.raises(TypeError, match="Invalid kernel type."): svm._get_kernel_name() diff --git a/tests/safeds/ml/classical/test_util_sklearn.py b/tests/safeds/ml/classical/test_util_sklearn.py index 82f57576b..aab12cae3 100644 --- a/tests/safeds/ml/classical/test_util_sklearn.py +++ b/tests/safeds/ml/classical/test_util_sklearn.py @@ -5,14 +5,14 @@ from safeds.data.tabular.containers import Table from safeds.exceptions import LearningError, PredictionError from safeds.ml.classical._util_sklearn import fit, predict -from safeds.ml.classical.regression import LinearRegression +from safeds.ml.classical.regression import LinearRegressionRegressor def test_predict_should_not_warn_about_feature_names() -> None: """See https://github.com/Safe-DS/Library/issues/51.""" training_set = Table({"a": [1, 2, 3], "b": [2, 4, 6]}).tag_columns(target_name="b") - model = LinearRegression() + model = LinearRegressionRegressor() fitted_model = model.fit(training_set) test_set = Table({"a": [4, 5, 6]}) diff --git a/tests/safeds/ml/nn/test_model.py b/tests/safeds/ml/nn/test_model.py index 3214b171c..3dc62bba6 100644 --- a/tests/safeds/ml/nn/test_model.py +++ b/tests/safeds/ml/nn/test_model.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import ModelNotFittedError, OutOfBoundsError -from safeds.ml.nn import ClassificationNeuralNetwork, FNNLayer, RegressionNeuralNetwork +from safeds.ml.nn import NeuralNetworkClassifier, FNNLayer, NeuralNetworkRegressor class TestClassificationModel: @@ -17,7 +17,7 @@ def test_should_raise_if_epoch_size_out_of_bounds(self, epoch_size: int) -> None OutOfBoundsError, match=rf"epoch_size \(={epoch_size}\) is not inside \[1, \u221e\)\.", ): - 
ClassificationNeuralNetwork([FNNLayer(1, 1)]).fit( + NeuralNetworkClassifier([FNNLayer(1, 1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), epoch_size=epoch_size, ) @@ -34,21 +34,21 @@ def test_should_raise_if_batch_size_out_of_bounds(self, batch_size: int) -> None OutOfBoundsError, match=rf"batch_size \(={batch_size}\) is not inside \[1, \u221e\)\.", ): - ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).fit( + NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), batch_size=batch_size, ) def test_should_raise_if_fit_function_returns_wrong_datatype(self) -> None: - fitted_model = ClassificationNeuralNetwork( + fitted_model = NeuralNetworkClassifier( [FNNLayer(input_size=1, output_size=8), FNNLayer(output_size=1)], ).fit( Table.from_dict({"a": [1], "b": [0]}).tag_columns("a"), ) - assert isinstance(fitted_model, ClassificationNeuralNetwork) + assert isinstance(fitted_model, NeuralNetworkClassifier) def test_should_raise_if_predict_function_returns_wrong_datatype(self) -> None: - fitted_model = ClassificationNeuralNetwork( + fitted_model = NeuralNetworkClassifier( [FNNLayer(input_size=1, output_size=8), FNNLayer(output_size=1)], ).fit( Table.from_dict({"a": [1, 0], "b": [0, 1]}).tag_columns("a"), @@ -57,7 +57,7 @@ def test_should_raise_if_predict_function_returns_wrong_datatype(self) -> None: assert isinstance(predictions, TaggedTable) def test_should_raise_if_predict_function_returns_wrong_datatype_for_multiclass_classification(self) -> None: - fitted_model = ClassificationNeuralNetwork( + fitted_model = NeuralNetworkClassifier( [FNNLayer(input_size=1, output_size=8), FNNLayer(output_size=3)], ).fit( Table.from_dict({"a": [0, 1, 2], "b": [0, 15, 51]}).tag_columns("a"), @@ -67,12 +67,12 @@ def test_should_raise_if_predict_function_returns_wrong_datatype_for_multiclass_ def test_should_raise_if_model_has_not_been_fitted(self) -> None: with 
pytest.raises(ModelNotFittedError, match="The model has not been fitted yet."): - ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).predict( + NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1)]).predict( Table.from_dict({"a": [1]}), ) def test_should_raise_if_is_fitted_is_set_correctly_for_binary_classification(self) -> None: - model = ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1)]) assert not model.is_fitted model = model.fit( Table.from_dict({"a": [1], "b": [0]}).tag_columns("a"), @@ -80,7 +80,7 @@ def test_should_raise_if_is_fitted_is_set_correctly_for_binary_classification(se assert model.is_fitted def test_should_raise_if_is_fitted_is_set_correctly_for_multiclass_classification(self) -> None: - model = ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1), FNNLayer(output_size=3)]) + model = NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1), FNNLayer(output_size=3)]) assert not model.is_fitted model = model.fit( Table.from_dict({"a": [1, 0, 2], "b": [0, 15, 5]}).tag_columns("a"), @@ -88,7 +88,7 @@ def test_should_raise_if_is_fitted_is_set_correctly_for_multiclass_classificatio assert model.is_fitted def test_should_raise_if_fit_doesnt_batch_callback(self) -> None: - model = ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1)]) class Test: self.was_called = False @@ -106,7 +106,7 @@ def callback_was_called(self) -> bool: assert obj.callback_was_called() is True def test_should_raise_if_fit_doesnt_epoch_callback(self) -> None: - model = ClassificationNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkClassifier([FNNLayer(input_size=1, output_size=1)]) class Test: self.was_called = False @@ -137,7 +137,7 @@ def test_should_raise_if_epoch_size_out_of_bounds(self, epoch_size: int) -> None 
OutOfBoundsError, match=rf"epoch_size \(={epoch_size}\) is not inside \[1, \u221e\)\.", ): - RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).fit( + NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), epoch_size=epoch_size, ) @@ -154,19 +154,19 @@ def test_should_raise_if_batch_size_out_of_bounds(self, batch_size: int) -> None OutOfBoundsError, match=rf"batch_size \(={batch_size}\) is not inside \[1, \u221e\)\.", ): - RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).fit( + NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), batch_size=batch_size, ) def test_should_raise_if_fit_function_returns_wrong_datatype(self) -> None: - fitted_model = RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).fit( + fitted_model = NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), ) - assert isinstance(fitted_model, RegressionNeuralNetwork) + assert isinstance(fitted_model, NeuralNetworkRegressor) def test_should_raise_if_predict_function_returns_wrong_datatype(self) -> None: - fitted_model = RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).fit( + fitted_model = NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]).fit( Table.from_dict({"a": [1], "b": [2]}).tag_columns("a"), ) predictions = fitted_model.predict(Table.from_dict({"b": [1]})) @@ -174,12 +174,12 @@ def test_should_raise_if_predict_function_returns_wrong_datatype(self) -> None: def test_should_raise_if_model_has_not_been_fitted(self) -> None: with pytest.raises(ModelNotFittedError, match="The model has not been fitted yet."): - RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]).predict( + NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]).predict( Table.from_dict({"a": [1]}), ) def 
test_should_raise_if_is_fitted_is_set_correctly(self) -> None: - model = RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]) assert not model.is_fitted model = model.fit( Table.from_dict({"a": [1], "b": [0]}).tag_columns("a"), @@ -187,7 +187,7 @@ def test_should_raise_if_is_fitted_is_set_correctly(self) -> None: assert model.is_fitted def test_should_raise_if_fit_doesnt_batch_callback(self) -> None: - model = RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]) class Test: self.was_called = False @@ -205,7 +205,7 @@ def callback_was_called(self) -> bool: assert obj.callback_was_called() is True def test_should_raise_if_fit_doesnt_epoch_callback(self) -> None: - model = RegressionNeuralNetwork([FNNLayer(input_size=1, output_size=1)]) + model = NeuralNetworkRegressor([FNNLayer(input_size=1, output_size=1)]) class Test: self.was_called = False From 8ada0b4c0c235e7ba485ac05bee3aa4a8b1be541 Mon Sep 17 00:00:00 2001 From: megalinter-bot <129584137+megalinter-bot@users.noreply.github.com> Date: Sat, 30 Mar 2024 15:59:58 +0000 Subject: [PATCH 2/4] style: apply automated linter fixes --- .../classification/test_classifier.py | 34 +++++++++++++++---- .../ml/classical/regression/test_regressor.py | 30 ++++++++++++---- tests/safeds/ml/nn/test_model.py | 2 +- 3 files changed, 52 insertions(+), 14 deletions(-) diff --git a/tests/safeds/ml/classical/classification/test_classifier.py b/tests/safeds/ml/classical/classification/test_classifier.py index 979264535..6e0b981b0 100644 --- a/tests/safeds/ml/classical/classification/test_classifier.py +++ b/tests/safeds/ml/classical/classification/test_classifier.py @@ -262,21 +262,41 @@ def test_should_return_true_after_fitting(self, classifier: Classifier, valid_da class TestHash: - @pytest.mark.parametrize(("classifier1", "classifier2"), ([(x, y) for x in 
classifiers() for y in classifiers() if x.__class__ == y.__class__]), ids=lambda x: x.__class__.__name__) - def test_should_return_same_hash_for_equal_classifier(self, classifier1: Classifier, classifier2: Classifier) -> None: + @pytest.mark.parametrize( + ("classifier1", "classifier2"), + ([(x, y) for x in classifiers() for y in classifiers() if x.__class__ == y.__class__]), + ids=lambda x: x.__class__.__name__, + ) + def test_should_return_same_hash_for_equal_classifier( + self, classifier1: Classifier, classifier2: Classifier, + ) -> None: assert hash(classifier1) == hash(classifier2) - @pytest.mark.parametrize(("classifier1", "classifier2"), ([(x, y) for x in classifiers() for y in classifiers() if x.__class__ != y.__class__]), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_unequal_classifier(self, classifier1: Classifier, classifier2: Classifier) -> None: + @pytest.mark.parametrize( + ("classifier1", "classifier2"), + ([(x, y) for x in classifiers() for y in classifiers() if x.__class__ != y.__class__]), + ids=lambda x: x.__class__.__name__, + ) + def test_should_return_different_hash_for_unequal_classifier( + self, classifier1: Classifier, classifier2: Classifier, + ) -> None: assert hash(classifier1) != hash(classifier2) @pytest.mark.parametrize("classifier1", classifiers(), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_same_classifier_fit(self, classifier1: Classifier, valid_data: TaggedTable) -> None: + def test_should_return_different_hash_for_same_classifier_fit( + self, classifier1: Classifier, valid_data: TaggedTable, + ) -> None: regressor1_fit = classifier1.fit(valid_data) assert hash(classifier1) != hash(regressor1_fit) - @pytest.mark.parametrize(("classifier1", "classifier2"), (list(itertools.product(classifiers(), classifiers()))), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_classifier_fit(self, classifier1: Classifier, classifier2: Classifier, 
valid_data: TaggedTable) -> None: + @pytest.mark.parametrize( + ("classifier1", "classifier2"), + (list(itertools.product(classifiers(), classifiers()))), + ids=lambda x: x.__class__.__name__, + ) + def test_should_return_different_hash_for_classifier_fit( + self, classifier1: Classifier, classifier2: Classifier, valid_data: TaggedTable, + ) -> None: classifier1_fit = classifier1.fit(valid_data) assert hash(classifier1_fit) != hash(classifier2) diff --git a/tests/safeds/ml/classical/regression/test_regressor.py b/tests/safeds/ml/classical/regression/test_regressor.py index a4b43b9b1..43ce56d15 100644 --- a/tests/safeds/ml/classical/regression/test_regressor.py +++ b/tests/safeds/ml/classical/regression/test_regressor.py @@ -273,21 +273,39 @@ def test_should_return_true_after_fitting(self, regressor: Regressor, valid_data class TestHash: - @pytest.mark.parametrize(("regressor1", "regressor2"), ([(x, y) for x in regressors() for y in regressors() if x.__class__ == y.__class__]), ids=lambda x: x.__class__.__name__) + @pytest.mark.parametrize( + ("regressor1", "regressor2"), + ([(x, y) for x in regressors() for y in regressors() if x.__class__ == y.__class__]), + ids=lambda x: x.__class__.__name__, + ) def test_should_return_same_hash_for_equal_regressor(self, regressor1: Regressor, regressor2: Regressor) -> None: assert hash(regressor1) == hash(regressor2) - @pytest.mark.parametrize(("regressor1", "regressor2"), ([(x, y) for x in regressors() for y in regressors() if x.__class__ != y.__class__]), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_unequal_regressor(self, regressor1: Regressor, regressor2: Regressor) -> None: + @pytest.mark.parametrize( + ("regressor1", "regressor2"), + ([(x, y) for x in regressors() for y in regressors() if x.__class__ != y.__class__]), + ids=lambda x: x.__class__.__name__, + ) + def test_should_return_different_hash_for_unequal_regressor( + self, regressor1: Regressor, regressor2: Regressor, + ) -> None: 
assert hash(regressor1) != hash(regressor2) @pytest.mark.parametrize("regressor1", regressors(), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_same_regressor_fit(self, regressor1: Regressor, valid_data: TaggedTable) -> None: + def test_should_return_different_hash_for_same_regressor_fit( + self, regressor1: Regressor, valid_data: TaggedTable, + ) -> None: regressor1_fit = regressor1.fit(valid_data) assert hash(regressor1) != hash(regressor1_fit) - @pytest.mark.parametrize(("regressor1", "regressor2"), (list(itertools.product(regressors(), regressors()))), ids=lambda x: x.__class__.__name__) - def test_should_return_different_hash_for_regressor_fit(self, regressor1: Regressor, regressor2: Regressor, valid_data: TaggedTable) -> None: + @pytest.mark.parametrize( + ("regressor1", "regressor2"), + (list(itertools.product(regressors(), regressors()))), + ids=lambda x: x.__class__.__name__, + ) + def test_should_return_different_hash_for_regressor_fit( + self, regressor1: Regressor, regressor2: Regressor, valid_data: TaggedTable, + ) -> None: regressor1_fit = regressor1.fit(valid_data) assert hash(regressor1_fit) != hash(regressor2) diff --git a/tests/safeds/ml/nn/test_model.py b/tests/safeds/ml/nn/test_model.py index 3dc62bba6..939978aee 100644 --- a/tests/safeds/ml/nn/test_model.py +++ b/tests/safeds/ml/nn/test_model.py @@ -1,7 +1,7 @@ import pytest from safeds.data.tabular.containers import Table, TaggedTable from safeds.exceptions import ModelNotFittedError, OutOfBoundsError -from safeds.ml.nn import NeuralNetworkClassifier, FNNLayer, NeuralNetworkRegressor +from safeds.ml.nn import FNNLayer, NeuralNetworkClassifier, NeuralNetworkRegressor class TestClassificationModel: From ee083ca679b2d858160878e56e083a436fb42073 Mon Sep 17 00:00:00 2001 From: megalinter-bot <129584137+megalinter-bot@users.noreply.github.com> Date: Sat, 30 Mar 2024 16:01:27 +0000 Subject: [PATCH 3/4] style: apply automated linter fixes --- 
.../classical/classification/test_classifier.py | 17 +++++++++++++---- .../ml/classical/regression/test_regressor.py | 13 ++++++++++--- 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/tests/safeds/ml/classical/classification/test_classifier.py b/tests/safeds/ml/classical/classification/test_classifier.py index 6e0b981b0..704565bb7 100644 --- a/tests/safeds/ml/classical/classification/test_classifier.py +++ b/tests/safeds/ml/classical/classification/test_classifier.py @@ -268,7 +268,9 @@ class TestHash: ids=lambda x: x.__class__.__name__, ) def test_should_return_same_hash_for_equal_classifier( - self, classifier1: Classifier, classifier2: Classifier, + self, + classifier1: Classifier, + classifier2: Classifier, ) -> None: assert hash(classifier1) == hash(classifier2) @@ -278,13 +280,17 @@ def test_should_return_same_hash_for_equal_classifier( ids=lambda x: x.__class__.__name__, ) def test_should_return_different_hash_for_unequal_classifier( - self, classifier1: Classifier, classifier2: Classifier, + self, + classifier1: Classifier, + classifier2: Classifier, ) -> None: assert hash(classifier1) != hash(classifier2) @pytest.mark.parametrize("classifier1", classifiers(), ids=lambda x: x.__class__.__name__) def test_should_return_different_hash_for_same_classifier_fit( - self, classifier1: Classifier, valid_data: TaggedTable, + self, + classifier1: Classifier, + valid_data: TaggedTable, ) -> None: regressor1_fit = classifier1.fit(valid_data) assert hash(classifier1) != hash(regressor1_fit) @@ -295,7 +301,10 @@ def test_should_return_different_hash_for_same_classifier_fit( ids=lambda x: x.__class__.__name__, ) def test_should_return_different_hash_for_classifier_fit( - self, classifier1: Classifier, classifier2: Classifier, valid_data: TaggedTable, + self, + classifier1: Classifier, + classifier2: Classifier, + valid_data: TaggedTable, ) -> None: classifier1_fit = classifier1.fit(valid_data) assert hash(classifier1_fit) != hash(classifier2) diff --git 
a/tests/safeds/ml/classical/regression/test_regressor.py b/tests/safeds/ml/classical/regression/test_regressor.py index 43ce56d15..152aa8c0d 100644 --- a/tests/safeds/ml/classical/regression/test_regressor.py +++ b/tests/safeds/ml/classical/regression/test_regressor.py @@ -287,13 +287,17 @@ def test_should_return_same_hash_for_equal_regressor(self, regressor1: Regressor ids=lambda x: x.__class__.__name__, ) def test_should_return_different_hash_for_unequal_regressor( - self, regressor1: Regressor, regressor2: Regressor, + self, + regressor1: Regressor, + regressor2: Regressor, ) -> None: assert hash(regressor1) != hash(regressor2) @pytest.mark.parametrize("regressor1", regressors(), ids=lambda x: x.__class__.__name__) def test_should_return_different_hash_for_same_regressor_fit( - self, regressor1: Regressor, valid_data: TaggedTable, + self, + regressor1: Regressor, + valid_data: TaggedTable, ) -> None: regressor1_fit = regressor1.fit(valid_data) assert hash(regressor1) != hash(regressor1_fit) @@ -304,7 +308,10 @@ def test_should_return_different_hash_for_same_regressor_fit( ids=lambda x: x.__class__.__name__, ) def test_should_return_different_hash_for_regressor_fit( - self, regressor1: Regressor, regressor2: Regressor, valid_data: TaggedTable, + self, + regressor1: Regressor, + regressor2: Regressor, + valid_data: TaggedTable, ) -> None: regressor1_fit = regressor1.fit(valid_data) assert hash(regressor1_fit) != hash(regressor2) From 96f48f1d523124f5b4270ff07aef5802b4a7ef4d Mon Sep 17 00:00:00 2001 From: Lars Reimann Date: Sat, 30 Mar 2024 17:13:22 +0100 Subject: [PATCH 4/4] docs: fix broken references --- docs/glossary.md | 6 +++--- docs/tutorials/classification.ipynb | 4 ++-- docs/tutorials/machine_learning.ipynb | 4 ++-- docs/tutorials/regression.ipynb | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/glossary.md b/docs/glossary.md index f0afeb80e..01c358560 100644 --- a/docs/glossary.md +++ b/docs/glossary.md @@ -25,7 +25,7 @@ It 
classifies the predictions to be either be [true positive](#true-positive-tp) ## Decision Tree A Decision Tree represents the process of conditional evaluation in a tree diagram. -Implemented in Safe-DS as [Decision Tree][safeds.ml.classical.classification.DecisionTree]. +Implemented in Safe-DS as [DecisionTreeClassifier][safeds.ml.classical.classification.DecisionTreeClassifier] and [DecisionTreeRegressor][safeds.ml.classical.regression.DecisionTreeRegressor]. ## F1-Score The harmonic mean of [precision](#precision) and [recall](#recall). Formula: @@ -48,7 +48,7 @@ It is analogous to a column within a table. Linear Regression is the supervised Machine Learning model in which the model finds the best fit linear line between the independent and dependent variable i.e. it finds the linear relationship between the dependent and independent variable. -Implemented in Safe-DS as [LinearRegression][safeds.ml.classical.regression.LinearRegression]. +Implemented in Safe-DS as [LinearRegressionRegressor][safeds.ml.classical.regression.LinearRegressionRegressor]. ## Machine Learning (ML) Machine Learning is a generic term for artificially generating knowledge through experience. @@ -84,7 +84,7 @@ See here for respective references: ## Random Forest Random Forest is an ML model that works by generating decision trees at random. -Implemented in Safe-DS as [RandomForest][safeds.ml.classical.regression.RandomForest]. +Implemented in Safe-DS as [RandomForestClassifier][safeds.ml.classical.classification.RandomForestClassifier] and [RandomForestRegressor][safeds.ml.classical.regression.RandomForestRegressor]. ## Recall The ability of a [classification](#classification) model to identify all the relevant data points.
Formula: diff --git a/docs/tutorials/classification.ipynb b/docs/tutorials/classification.ipynb index 59e2b812b..89f994afd 100644 --- a/docs/tutorials/classification.ipynb +++ b/docs/tutorials/classification.ipynb @@ -145,9 +145,9 @@ "execution_count": null, "outputs": [], "source": [ - "from safeds.ml.classical.classification import RandomForest\n", + "from safeds.ml.classical.classification import RandomForestClassifier\n", "\n", - "model = RandomForest()\n", + "model = RandomForestClassifier()\n", "fitted_model= model.fit(tagged_train_table)" ], "metadata": { diff --git a/docs/tutorials/machine_learning.ipynb b/docs/tutorials/machine_learning.ipynb index f9d53bca1..acb5d3e27 100644 --- a/docs/tutorials/machine_learning.ipynb +++ b/docs/tutorials/machine_learning.ipynb @@ -54,9 +54,9 @@ "execution_count": null, "outputs": [], "source": [ - "from safeds.ml.classical.regression import LinearRegression\n", + "from safeds.ml.classical.regression import LinearRegressionRegressor\n", "\n", - "model = LinearRegression()\n", + "model = LinearRegressionRegressor()\n", "fitted_model = model.fit(tagged_table)" ], "metadata": { diff --git a/docs/tutorials/regression.ipynb b/docs/tutorials/regression.ipynb index 7b26f0ee3..21aaba3de 100644 --- a/docs/tutorials/regression.ipynb +++ b/docs/tutorials/regression.ipynb @@ -98,9 +98,9 @@ "execution_count": null, "outputs": [], "source": [ - "from safeds.ml.classical.regression import DecisionTree\n", + "from safeds.ml.classical.regression import DecisionTreeRegressor\n", "\n", - "model = DecisionTree()\n", + "model = DecisionTreeRegressor()\n", "fitted_model = model.fit(tagged_train_table)" ], "metadata": {