From 543002c223aebbea4931a8a2d99922679e90a382 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan
Date: Sat, 11 Mar 2023 03:11:13 +0800
Subject: [PATCH] fix rebase.

---
 demo/guide-python/multioutput_regression.py | 6 +++---
 doc/parameter.rst                           | 6 +++---
 doc/tutorials/multioutput.rst               | 6 +++---
 python-package/xgboost/sklearn.py           | 2 ++
 src/learner.cc                              | 5 -----
 tests/cpp/predictor/test_gpu_predictor.cu   | 6 +++---
 6 files changed, 14 insertions(+), 17 deletions(-)

diff --git a/demo/guide-python/multioutput_regression.py b/demo/guide-python/multioutput_regression.py
index 69bc4d300c08..4ac9b596e469 100644
--- a/demo/guide-python/multioutput_regression.py
+++ b/demo/guide-python/multioutput_regression.py
@@ -95,7 +95,7 @@ def rmse(predt: np.ndarray, dtrain: xgb.DMatrix) -> Tuple[str, float]:
         {
             "tree_method": "hist",
             "num_target": y.shape[1],
-            "multi_strategy": "mono",
+            "multi_strategy": "monolithic",
         },
         dtrain=Xy,
         num_boost_round=128,
@@ -116,8 +116,8 @@ def rmse(predt: np.ndarray, dtrain: xgb.DMatrix) -> Tuple[str, float]:
     args = parser.parse_args()
     # Train with builtin RMSE objective
     # one model per output
-    rmse_model(args.plot == 1, "compo")
+    rmse_model(args.plot == 1, "composite")
     # one model for all outputs
-    rmse_model(args.plot == 1, "mono")
+    rmse_model(args.plot == 1, "monolithic")
     # Train with custom objective.
     custom_rmse_model(args.plot == 1)
diff --git a/doc/parameter.rst b/doc/parameter.rst
index 459eceb97dfe..90d261601d38 100644
--- a/doc/parameter.rst
+++ b/doc/parameter.rst
@@ -226,12 +226,12 @@ Parameters for Tree Booster
   list is a group of indices of features that are allowed to interact with each
   other. See :doc:`/tutorials/feature_interaction_constraint` for more information.

-* ``multi_strategy``, [default = ``compo``]
+* ``multi_strategy``, [default = ``composite``]

   - The strategy used for training multi-target models.

-  - ``compo``: One model for each target.
-  - ``mono``: Use multi-target trees.
+  - ``composite``: One model for each target.
+  - ``monolithic``: Use multi-target trees.

 .. _cat-param:
diff --git a/doc/tutorials/multioutput.rst b/doc/tutorials/multioutput.rst
index 3e474a7982c0..9de68efbfba4 100644
--- a/doc/tutorials/multioutput.rst
+++ b/doc/tutorials/multioutput.rst
@@ -54,12 +54,12 @@ Training with Vector Leaf
 XGBoost can optionally build multi-output trees with the size of leaf equals to the
 number of targets. The behavior can be controlled by the ``multi_strategy`` training
-parameter. It can take the value `compo` (the default) or `mono`. Specify `mono` and use
-``tree_method=hist`` to enable this feature.
+parameter. It can take the value `composite` (the default) or `monolithic`. Specify
+`monolithic` and use ``tree_method=hist`` to enable this feature.

 .. code-block:: python

-    clf = xgb.XGBClassifier(tree_method="hist", multi_strategy="mono")
+    clf = xgb.XGBClassifier(tree_method="hist", multi_strategy="monolithic")

 See :ref:`sphx_glr_python_examples_multioutput_regression.py` for a worked example.
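For quick reference, here is a minimal, self-contained sketch of the two renamed ``multi_strategy`` values, mirroring the demo and tutorial changes above. The synthetic data, target count, and boosting rounds are illustrative only, and the script assumes an XGBoost build that includes this patch.

    import numpy as np
    import xgboost as xgb

    rng = np.random.default_rng(1994)
    X = rng.normal(size=(256, 8))
    y = rng.normal(size=(256, 3))  # three regression targets
    Xy = xgb.DMatrix(X, label=y)

    params = {"tree_method": "hist", "num_target": y.shape[1]}

    # "composite" (the default): boost one model per target.
    composite = xgb.train(
        {**params, "multi_strategy": "composite"}, dtrain=Xy, num_boost_round=8
    )

    # "monolithic": a single model whose trees carry vector leaves
    # covering all targets at once.
    monolithic = xgb.train(
        {**params, "multi_strategy": "monolithic"}, dtrain=Xy, num_boost_round=8
    )

    # Either way, predictions have one column per target.
    print(composite.predict(Xy).shape, monolithic.predict(Xy).shape)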
diff --git a/python-package/xgboost/sklearn.py b/python-package/xgboost/sklearn.py
index 3204f5a2a61e..c3a93652c2e9 100644
--- a/python-package/xgboost/sklearn.py
+++ b/python-package/xgboost/sklearn.py
@@ -624,6 +624,7 @@ def __init__(
         feature_types: Optional[FeatureTypes] = None,
         max_cat_to_onehot: Optional[int] = None,
         max_cat_threshold: Optional[int] = None,
+        multi_strategy: Optional[str] = None,
         eval_metric: Optional[Union[str, List[str], Callable]] = None,
         early_stopping_rounds: Optional[int] = None,
         callbacks: Optional[List[TrainingCallback]] = None,
@@ -670,6 +671,7 @@ def __init__(
         self.feature_types = feature_types
         self.max_cat_to_onehot = max_cat_to_onehot
         self.max_cat_threshold = max_cat_threshold
+        self.multi_strategy = multi_strategy
         self.eval_metric = eval_metric
         self.early_stopping_rounds = early_stopping_rounds
         self.callbacks = callbacks
diff --git a/src/learner.cc b/src/learner.cc
index 36437596e743..04c50e4005d4 100644
--- a/src/learner.cc
+++ b/src/learner.cc
@@ -67,11 +67,6 @@ const char* kMaxDeltaStepDefaultValue = "0.7";
 DECLARE_FIELD_ENUM_CLASS(xgboost::MultiStrategy);

-namespace xgboost {
-std::string StrategyStr(Strategy s) { return s == Strategy::kComposite ? "compo" : "mono"; }
-}  // namespace xgboost
-DECLARE_FIELD_ENUM_CLASS(xgboost::Strategy);
-
 namespace xgboost {
 Learner::~Learner() = default;

 namespace {
diff --git a/tests/cpp/predictor/test_gpu_predictor.cu b/tests/cpp/predictor/test_gpu_predictor.cu
index 24031c583bfb..aa898f805e21 100644
--- a/tests/cpp/predictor/test_gpu_predictor.cu
+++ b/tests/cpp/predictor/test_gpu_predictor.cu
@@ -36,7 +36,7 @@ TEST(GPUPredictor, Basic) {
   Context ctx;
   ctx.gpu_id = 0;

-  LearnerModelParam mparam{MakeMP(n_col, .5, 1, ctx.gpu_id)};
+  LearnerModelParam mparam{MakeMP(n_col, .5, ctx.gpu_id)};
   gbm::GBTreeModel model = CreateTestModel(&mparam, &ctx);

   // Test predict batch
@@ -150,7 +150,7 @@ TEST(GPUPredictor, ShapStump) {
   Context ctx;
   ctx.gpu_id = 0;

-  LearnerModelParam mparam{MakeMP(1, .5, 1, 1, ctx.gpu_id)};
+  LearnerModelParam mparam{MakeMP(1, .5, 1, ctx.gpu_id)};
   gbm::GBTreeModel model(&mparam, &ctx);

   std::vector<std::unique_ptr<RegTree>> trees;
@@ -177,7 +177,7 @@ TEST(GPUPredictor, Shap) {
   Context ctx;
   ctx.gpu_id = 0;
-  LearnerModelParam mparam{MakeMP(1, .5, 1, 1, ctx.gpu_id)};
+  LearnerModelParam mparam{MakeMP(1, .5, 1, ctx.gpu_id)};
   gbm::GBTreeModel model(&mparam, &ctx);

   std::vector<std::unique_ptr<RegTree>> trees;
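The scikit-learn change above only threads the new keyword through ``__init__``. Assuming the wrapper forwards ``multi_strategy`` to the booster the same way it forwards the surrounding parameters, usage would look like this hedged sketch (synthetic data and estimator count are illustrative only):

    import numpy as np
    import xgboost as xgb

    rng = np.random.default_rng(2023)
    X = rng.normal(size=(128, 4))
    y = rng.normal(size=(128, 2))  # two targets

    # multi_strategy="monolithic" requests multi-target (vector-leaf) trees;
    # leaving it as None keeps the "composite" one-model-per-target default.
    reg = xgb.XGBRegressor(
        tree_method="hist", multi_strategy="monolithic", n_estimators=8
    )
    reg.fit(X, y)
    print(reg.predict(X).shape)  # one prediction column per target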