From 284f7a726ca73ad429d44fc3508ffc4d15818e62 Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Thu, 13 Jul 2023 16:18:18 -0700 Subject: [PATCH 01/13] Implemented proper work with multiple threads --- sklearnex/dispatcher.py | 10 +++++++++ sklearnex/utils/parallel.py | 44 +++++++++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 sklearnex/utils/parallel.py diff --git a/sklearnex/dispatcher.py b/sklearnex/dispatcher.py index d19a4d4786..daff63776d 100644 --- a/sklearnex/dispatcher.py +++ b/sklearnex/dispatcher.py @@ -47,11 +47,13 @@ def get_patch_map(): import sklearn.linear_model as linear_model_module import sklearn.neighbors as neighbors_module import sklearn.svm as svm_module + import sklearn.utils.parallel as parallel_module # Classes and functions for patching from ._config import config_context as config_context_sklearnex from ._config import get_config as get_config_sklearnex from ._config import set_config as set_config_sklearnex + from .utils.parallel import _FuncWrapper as _FuncWrapper_sklearnex from .neighbors import KNeighborsClassifier as KNeighborsClassifier_sklearnex from .neighbors import KNeighborsRegressor as KNeighborsRegressor_sklearnex from .neighbors import LocalOutlierFactor as LocalOutlierFactor_sklearnex @@ -221,6 +223,14 @@ def get_patch_map(): mapping["config_context"] = [ [(base_module, "config_context", config_context_sklearnex), None] ] + + # Necessary for proper work with multiple threads + mapping["parallel.get_config"] = [ + [(parallel_module, "get_config", get_config_sklearnex), None] + ] + mapping["_funcwrapper"] = [ + [(parallel_module, "_FuncWrapper", _FuncWrapper_sklearnex), None] + ] return mapping diff --git a/sklearnex/utils/parallel.py b/sklearnex/utils/parallel.py new file mode 100644 index 0000000000..479f8aae3c --- /dev/null +++ b/sklearnex/utils/parallel.py @@ -0,0 +1,44 @@ +#=============================================================================== +# Copyright 2022 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#=============================================================================== + +import warnings +from functools import update_wrapper +from .._config import config_context + +class _FuncWrapper: + """Load the global configuration before calling the function.""" + + def __init__(self, function): + self.function = function + update_wrapper(self, self.function) + + def with_config(self, config): + self.config = config + return self + + def __call__(self, *args, **kwargs): + config = getattr(self, "config", None) + if config is None: + warnings.warn( + "`sklearn.utils.parallel.delayed` should be used with " + "`sklearn.utils.parallel.Parallel` to make it possible to propagate " + "the scikit-learn configuration of the current thread to the " + "joblib workers.", + UserWarning, + ) + config = {} + with config_context(**config): + return self.function(*args, **kwargs) From 53b59fd7782b853cc9a5c62d70cab3afdaf9a98a Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Thu, 13 Jul 2023 18:32:15 -0700 Subject: [PATCH 02/13] Add support for older sklearn versions --- sklearnex/dispatcher.py | 10 ++++++++-- sklearnex/utils/parallel.py | 15 ++++++++++++++- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/sklearnex/dispatcher.py b/sklearnex/dispatcher.py index daff63776d..1a511a7ded 100644 --- a/sklearnex/dispatcher.py +++ b/sklearnex/dispatcher.py @@ -47,13 +47,19 @@ def get_patch_map(): import sklearn.linear_model as linear_model_module import sklearn.neighbors as neighbors_module import sklearn.svm as svm_module - import sklearn.utils.parallel as parallel_module + if sklearn_check_version('1.2.1'): + import sklearn.utils.parallel as parallel_module + else: + import sklearn.utils.fixes as parallel_module # Classes and functions for patching from ._config import config_context as config_context_sklearnex from ._config import get_config as get_config_sklearnex from ._config import set_config as set_config_sklearnex - from .utils.parallel import _FuncWrapper as _FuncWrapper_sklearnex + if sklearn_check_version('1.2.1'): + from .utils.parallel import _FuncWrapper as _FuncWrapper_sklearnex + else: + from .utils.parallel import _FuncWrapperOld as _FuncWrapper_sklearnex from .neighbors import KNeighborsClassifier as KNeighborsClassifier_sklearnex from .neighbors import KNeighborsRegressor as KNeighborsRegressor_sklearnex from .neighbors import LocalOutlierFactor as LocalOutlierFactor_sklearnex diff --git a/sklearnex/utils/parallel.py b/sklearnex/utils/parallel.py index 479f8aae3c..d0a7310830 100644 --- a/sklearnex/utils/parallel.py +++ b/sklearnex/utils/parallel.py @@ -16,7 +16,8 @@ import warnings from functools import update_wrapper -from .._config import config_context +from .._config import config_context, get_config + class _FuncWrapper: """Load the global configuration before calling the function.""" @@ -42,3 +43,15 @@ def __call__(self, *args, **kwargs): config = {} with config_context(**config): return self.function(*args, **kwargs) + +class _FuncWrapperOld: + """ "Load the global configuration before calling the function.""" + + def __init__(self, function): + self.function = function + self.config = get_config() + update_wrapper(self, self.function) + + def __call__(self, *args, **kwargs): + with config_context(**self.config): + return self.function(*args, **kwargs) From 674a6664dd1380b06f4f79c5082997b4ce478b91 Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Mon, 17 Jul 2023 14:22:41 -0700 Subject: [PATCH 03/13] Change condition in test_memory_usage to avoid test failure --- 
sklearnex/tests/test_memory_usage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sklearnex/tests/test_memory_usage.py b/sklearnex/tests/test_memory_usage.py index bd7d87bd51..164797c3fe 100644 --- a/sklearnex/tests/test_memory_usage.py +++ b/sklearnex/tests/test_memory_usage.py @@ -90,8 +90,8 @@ def get_patched_estimators(ban_list, output_list): estimator, name = listing[0][0][2], listing[0][0][1] if not isinstance(estimator, types.FunctionType): if name not in ban_list: - if isinstance(estimator(), BaseEstimator): - if hasattr(estimator, "fit"): + if issubclass(estimator, BaseEstimator): + if hasattr(estimator, 'fit'): output_list.append(estimator) From bb2cd80c19a4ec1cb2a34ec34e89d4df8e1f9c87 Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Mon, 17 Jul 2023 14:27:34 -0700 Subject: [PATCH 04/13] Style fixes --- sklearnex/utils/parallel.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sklearnex/utils/parallel.py b/sklearnex/utils/parallel.py index d0a7310830..58eb857412 100644 --- a/sklearnex/utils/parallel.py +++ b/sklearnex/utils/parallel.py @@ -1,5 +1,5 @@ #=============================================================================== -# Copyright 2022 Intel Corporation +# Copyright 2023 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -44,8 +44,9 @@ def __call__(self, *args, **kwargs): with config_context(**config): return self.function(*args, **kwargs) + class _FuncWrapperOld: - """ "Load the global configuration before calling the function.""" + """Load the global configuration before calling the function.""" def __init__(self, function): self.function = function From 67e080cef1b93cdb2b9f046710d4b84e0596dd01 Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Mon, 31 Jul 2023 15:36:31 -0700 Subject: [PATCH 05/13] Remove unnecessary queue propagation via config in BaseSVC._fit_proba --- sklearnex/svm/_common.py | 46 +++++++++++++++++----------------------- 1 file changed, 19 insertions(+), 27 deletions(-) diff --git a/sklearnex/svm/_common.py b/sklearnex/svm/_common.py index 36c4d6becf..6c305a0425 100644 --- a/sklearnex/svm/_common.py +++ b/sklearnex/svm/_common.py @@ -98,39 +98,31 @@ def _compute_balanced_class_weight(self, y): return recip_freq[le.transform(classes)] def _fit_proba(self, X, y, sample_weight=None, queue=None): - from .._config import config_context, get_config - params = self.get_params() params["probability"] = False params["decision_function_shape"] = "ovr" clf_base = self.__class__(**params) - # We use stock metaestimators below, so the only way - # to pass a queue is using config_context. 
- cfg = get_config() - cfg["target_offload"] = queue - with config_context(**cfg): - try: - n_splits = 5 - n_jobs = n_splits if queue is None or queue.sycl_device.is_cpu else 1 - cv = StratifiedKFold( - n_splits=n_splits, shuffle=True, random_state=self.random_state - ) - if sklearn_check_version("0.24"): - self.clf_prob = CalibratedClassifierCV( - clf_base, ensemble=False, cv=cv, method="sigmoid", n_jobs=n_jobs - ) - else: - self.clf_prob = CalibratedClassifierCV( - clf_base, cv=cv, method="sigmoid" - ) - self.clf_prob.fit(X, y, sample_weight) - except ValueError: - clf_base = clf_base.fit(X, y, sample_weight) + try: + n_splits = 5 + n_jobs = n_splits if queue is None or queue.sycl_device.is_cpu else 1 + cv = StratifiedKFold( + n_splits=n_splits, + shuffle=True, + random_state=self.random_state) + if sklearn_check_version("0.24"): + self.clf_prob = CalibratedClassifierCV( + clf_base, ensemble=False, cv=cv, method='sigmoid', + n_jobs=n_jobs) + else: self.clf_prob = CalibratedClassifierCV( - clf_base, cv="prefit", method="sigmoid" - ) - self.clf_prob.fit(X, y, sample_weight) + clf_base, cv=cv, method='sigmoid') + self.clf_prob.fit(X, y, sample_weight) + except ValueError: + clf_base = clf_base.fit(X, y, sample_weight) + self.clf_prob = CalibratedClassifierCV( + clf_base, cv="prefit", method='sigmoid') + self.clf_prob.fit(X, y, sample_weight) def _save_attributes(self): self.support_vectors_ = self._onedal_estimator.support_vectors_ From d7ed459eef13f49bbbc31457e859e90134fbf632 Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Mon, 31 Jul 2023 16:18:36 -0700 Subject: [PATCH 06/13] Sort imports --- sklearnex/dispatcher.py | 6 ++++-- sklearnex/svm/_common.py | 15 +++++++-------- sklearnex/tests/test_memory_usage.py | 2 +- sklearnex/utils/parallel.py | 5 +++-- 4 files changed, 15 insertions(+), 13 deletions(-) diff --git a/sklearnex/dispatcher.py b/sklearnex/dispatcher.py index 1a511a7ded..e24f7a9ea1 100644 --- a/sklearnex/dispatcher.py +++ b/sklearnex/dispatcher.py @@ -47,7 +47,8 @@ def get_patch_map(): import sklearn.linear_model as linear_model_module import sklearn.neighbors as neighbors_module import sklearn.svm as svm_module - if sklearn_check_version('1.2.1'): + + if sklearn_check_version("1.2.1"): import sklearn.utils.parallel as parallel_module else: import sklearn.utils.fixes as parallel_module @@ -56,7 +57,8 @@ def get_patch_map(): from ._config import config_context as config_context_sklearnex from ._config import get_config as get_config_sklearnex from ._config import set_config as set_config_sklearnex - if sklearn_check_version('1.2.1'): + + if sklearn_check_version("1.2.1"): from .utils.parallel import _FuncWrapper as _FuncWrapper_sklearnex else: from .utils.parallel import _FuncWrapperOld as _FuncWrapper_sklearnex diff --git a/sklearnex/svm/_common.py b/sklearnex/svm/_common.py index 6c305a0425..2b92385503 100644 --- a/sklearnex/svm/_common.py +++ b/sklearnex/svm/_common.py @@ -107,21 +107,20 @@ def _fit_proba(self, X, y, sample_weight=None, queue=None): n_splits = 5 n_jobs = n_splits if queue is None or queue.sycl_device.is_cpu else 1 cv = StratifiedKFold( - n_splits=n_splits, - shuffle=True, - random_state=self.random_state) + n_splits=n_splits, shuffle=True, random_state=self.random_state + ) if sklearn_check_version("0.24"): self.clf_prob = CalibratedClassifierCV( - clf_base, ensemble=False, cv=cv, method='sigmoid', - n_jobs=n_jobs) + clf_base, ensemble=False, cv=cv, method="sigmoid", n_jobs=n_jobs + ) else: - self.clf_prob = CalibratedClassifierCV( - clf_base, cv=cv, 
method='sigmoid') + self.clf_prob = CalibratedClassifierCV(clf_base, cv=cv, method="sigmoid") self.clf_prob.fit(X, y, sample_weight) except ValueError: clf_base = clf_base.fit(X, y, sample_weight) self.clf_prob = CalibratedClassifierCV( - clf_base, cv="prefit", method='sigmoid') + clf_base, cv="prefit", method="sigmoid" + ) self.clf_prob.fit(X, y, sample_weight) def _save_attributes(self): diff --git a/sklearnex/tests/test_memory_usage.py b/sklearnex/tests/test_memory_usage.py index 164797c3fe..9c590289dc 100644 --- a/sklearnex/tests/test_memory_usage.py +++ b/sklearnex/tests/test_memory_usage.py @@ -91,7 +91,7 @@ def get_patched_estimators(ban_list, output_list): if not isinstance(estimator, types.FunctionType): if name not in ban_list: if issubclass(estimator, BaseEstimator): - if hasattr(estimator, 'fit'): + if hasattr(estimator, "fit"): output_list.append(estimator) diff --git a/sklearnex/utils/parallel.py b/sklearnex/utils/parallel.py index 58eb857412..52e9a0b6f6 100644 --- a/sklearnex/utils/parallel.py +++ b/sklearnex/utils/parallel.py @@ -1,4 +1,4 @@ -#=============================================================================== +# =============================================================================== # Copyright 2023 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,10 +12,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -#=============================================================================== +# =============================================================================== import warnings from functools import update_wrapper + from .._config import config_context, get_config From 18ea5bd25c488a0c1b760b316359b4a4a95a05fe Mon Sep 17 00:00:00 2001 From: "Kruglov, Oleg" Date: Tue, 5 Sep 2023 06:35:50 -0700 Subject: [PATCH 07/13] Remove problematic test from deselected --- deselected_tests.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/deselected_tests.yaml b/deselected_tests.yaml index 8b514e3ef6..60de1bc44c 100755 --- a/deselected_tests.yaml +++ b/deselected_tests.yaml @@ -728,7 +728,6 @@ gpu: - neighbors/tests/test_neighbors.py::test_neigh_predictions_algorithm_agnosticity[float64-KNeighborsRegressor-50-500-l2-1000-5-100] - neighbors/tests/test_neighbors.py::test_neigh_predictions_algorithm_agnosticity[float64-KNeighborsRegressor-100-1000-l2-1000-5-100] # failing due to numeric/code error - - ensemble/tests/test_bagging.py::test_parallel_classification - linear_model/tests/test_common.py::test_balance_property[42-False-LogisticRegressionCV] - sklearn/manifold/tests/test_t_sne.py::test_n_iter_without_progress - model_selection/tests/test_search.py::test_searchcv_raise_warning_with_non_finite_score[RandomizedSearchCV-specialized_params1-False] From e331715d54595c652967d79586167db7f0739ab8 Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Thu, 7 Sep 2023 11:33:51 -0700 Subject: [PATCH 08/13] Add test for config_context in parallel --- sklearnex/tests/test_parallel.py | 50 ++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 sklearnex/tests/test_parallel.py diff --git a/sklearnex/tests/test_parallel.py b/sklearnex/tests/test_parallel.py new file mode 100644 index 0000000000..36c3165315 --- /dev/null +++ b/sklearnex/tests/test_parallel.py @@ -0,0 +1,50 @@ +# ============================================================================== +# Copyright 
2023 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +import pytest + +from sklearnex import config_context, patch_sklearn + +patch_sklearn() + +from sklearn.datasets import make_classification +from sklearn.ensemble import BaggingClassifier +from sklearn.svm import SVC + +try: + import dpctl + + dpctl_is_available = True + gpu_is_available = dpctl.has_gpu_devices() +except (ImportError, ModuleNotFoundError): + dpctl_is_available = False + + +@pytest.mark.skipif( + not dpctl_is_available or gpu_is_available, + reason="GPU device should not be available for this test " + "to see raised 'SyclQueueCreationError'. " + "'dpctl' module is required for test.", +) +def test_config_context_in_parallel(): + x, y = make_classification(random_state=42) + try: + with config_context(target_offload="gpu"): + BaggingClassifier(SVC(), n_jobs=2).fit(x, y) + raise ValueError( + "'SyclQueueCreationError' wasn't raised " "for non-existing 'gpu' device" + ) + except dpctl._sycl_queue.SyclQueueCreationError: + pass From f0167260a20f8c5462560c209f103c3ee71b76ce Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Mon, 11 Sep 2023 16:41:19 +0100 Subject: [PATCH 09/13] Explicitly disallow fallback to host in test --- sklearnex/tests/test_parallel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sklearnex/tests/test_parallel.py b/sklearnex/tests/test_parallel.py index 36c3165315..2736c93cb5 100644 --- a/sklearnex/tests/test_parallel.py +++ b/sklearnex/tests/test_parallel.py @@ -41,7 +41,7 @@ def test_config_context_in_parallel(): x, y = make_classification(random_state=42) try: - with config_context(target_offload="gpu"): + with config_context(target_offload="gpu", allow_fallback_to_host=False): BaggingClassifier(SVC(), n_jobs=2).fit(x, y) raise ValueError( "'SyclQueueCreationError' wasn't raised " "for non-existing 'gpu' device" From d5424014785d60ab637e285fa0c0b6aa4dd91f7d Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Mon, 11 Sep 2023 16:50:13 +0100 Subject: [PATCH 10/13] Debug print for dpctl in test running --- .ci/scripts/run_sklearn_tests.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.ci/scripts/run_sklearn_tests.sh b/.ci/scripts/run_sklearn_tests.sh index 8a05eaf47b..1ff00d24ca 100755 --- a/.ci/scripts/run_sklearn_tests.sh +++ b/.ci/scripts/run_sklearn_tests.sh @@ -39,5 +39,10 @@ if [ -n "${OCL_ICD_FILENAMES}" ]; then echo "OCL_ICD_FILENAMES is set to ${OCL_ICD_FILENAMES}" fi +# Show devices listed by dpctl +if [ -n "$(pip list | grep dpctl)" ]; then + python -c "import dpctl; print(dpctl.get_devices())" +fi + python scripts/run_sklearn_tests.py -d ${1:-none} exit $? 
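Note: the GPU-offload test added in PATCH 08/13 relies on dpctl raising SyclQueueCreationError when no GPU device can be found, which is also why PATCH 10/13 prints the dpctl device list in CI. A minimal illustration of that failure mode follows; it assumes only that dpctl is installed and is not part of the patch series itself.

# Illustration only: this mirrors the failure that
# test_config_context_in_parallel expects to propagate out of the joblib
# workers when target_offload="gpu" is requested on a GPU-less machine.
# Assumes dpctl is installed.
import dpctl

try:
    queue = dpctl.SyclQueue("gpu")  # SYCL filter-selector string
    print("GPU queue created on:", queue.sycl_device)
except dpctl._sycl_queue.SyclQueueCreationError:
    print("No GPU device available; queue creation failed as expected.")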
From 5648d30b25068b92226665ff8b2c83e109cdab03 Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Mon, 11 Sep 2023 18:12:53 +0100 Subject: [PATCH 11/13] Change parallel backend for test --- sklearnex/tests/test_parallel.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sklearnex/tests/test_parallel.py b/sklearnex/tests/test_parallel.py index 2736c93cb5..6deaed97d8 100644 --- a/sklearnex/tests/test_parallel.py +++ b/sklearnex/tests/test_parallel.py @@ -19,6 +19,7 @@ patch_sklearn() +from joblib import parallel_config from sklearn.datasets import make_classification from sklearn.ensemble import BaggingClassifier from sklearn.svm import SVC @@ -41,8 +42,9 @@ def test_config_context_in_parallel(): x, y = make_classification(random_state=42) try: - with config_context(target_offload="gpu", allow_fallback_to_host=False): - BaggingClassifier(SVC(), n_jobs=2).fit(x, y) + with parallel_config(backend="threading"): + with config_context(target_offload="gpu", allow_fallback_to_host=False): + BaggingClassifier(SVC(), n_jobs=2).fit(x, y) raise ValueError( "'SyclQueueCreationError' wasn't raised " "for non-existing 'gpu' device" ) From 20b0e84b64a74a0194c14010d19a5e45aa6f6454 Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Mon, 11 Sep 2023 20:51:41 +0100 Subject: [PATCH 12/13] Revert "Change parallel backend for test" This reverts commit 5648d30b25068b92226665ff8b2c83e109cdab03. --- sklearnex/tests/test_parallel.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/sklearnex/tests/test_parallel.py b/sklearnex/tests/test_parallel.py index 6deaed97d8..2736c93cb5 100644 --- a/sklearnex/tests/test_parallel.py +++ b/sklearnex/tests/test_parallel.py @@ -19,7 +19,6 @@ patch_sklearn() -from joblib import parallel_config from sklearn.datasets import make_classification from sklearn.ensemble import BaggingClassifier from sklearn.svm import SVC @@ -42,9 +41,8 @@ def test_config_context_in_parallel(): x, y = make_classification(random_state=42) try: - with parallel_config(backend="threading"): - with config_context(target_offload="gpu", allow_fallback_to_host=False): - BaggingClassifier(SVC(), n_jobs=2).fit(x, y) + with config_context(target_offload="gpu", allow_fallback_to_host=False): + BaggingClassifier(SVC(), n_jobs=2).fit(x, y) raise ValueError( "'SyclQueueCreationError' wasn't raised " "for non-existing 'gpu' device" ) From 9f74cee9b1f6e8925777c4f77eec4ca100cf353a Mon Sep 17 00:00:00 2001 From: Alexander Andreev Date: Mon, 11 Sep 2023 20:52:25 +0100 Subject: [PATCH 13/13] Separate monkeypatch tests --- conda-recipe/run_test.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/conda-recipe/run_test.sh b/conda-recipe/run_test.sh index 879b37736b..0eea868ec7 100755 --- a/conda-recipe/run_test.sh +++ b/conda-recipe/run_test.sh @@ -54,7 +54,9 @@ pytest --verbose --pyargs ${daal4py_dir}/daal4py/sklearn return_code=$(($return_code + $?)) echo "Pytest of sklearnex running ..." -pytest --verbose --pyargs ${daal4py_dir}/sklearnex +# TODO: investigate why test_monkeypatch.py might cause failures of other tests +pytest --verbose --pyargs --deselect sklearnex/tests/test_monkeypatch.py ${daal4py_dir}/sklearnex +pytest --verbose ${daal4py_dir}/sklearnex/tests/test_monkeypatch.py return_code=$(($return_code + $?)) echo "Pytest of onedal running ..."
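Taken together, the series makes thread-local sklearnex settings visible inside joblib workers. The following is a rough usage sketch, not part of the patches: it assumes scikit-learn >= 1.2.1 (the version gate used above, where sklearn.utils.parallel exists) and an sklearnex build that contains these changes, and the expected output is an expectation rather than a captured result.

# Rough sketch: settings from the calling thread's config_context should
# become visible inside joblib worker threads once _FuncWrapper and
# parallel.get_config are patched. Assumes scikit-learn >= 1.2.1 and an
# sklearnex build containing these patches.
from sklearnex import patch_sklearn

patch_sklearn()

# Import after patching so the sklearnex versions are picked up.
from sklearn import config_context, get_config
from sklearn.utils.parallel import Parallel, delayed

with config_context(target_offload="gpu"):
    # The value is only recorded in the thread-local configuration here;
    # no estimator is fit, so no GPU device is actually required.
    worker_configs = Parallel(n_jobs=2, backend="threading")(
        delayed(get_config)() for _ in range(4)
    )

# Before these patches the workers would report the default ("auto");
# with them, every entry should read "gpu".
print([cfg["target_offload"] for cfg in worker_configs])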