From 48ccbc179002e7878a181607017805221ebe82c9 Mon Sep 17 00:00:00 2001
From: RektPunk
Date: Tue, 10 Sep 2024 21:25:35 +0900
Subject: [PATCH 1/2] update docs
---
README.md | 15 +++++++++++++++
mqboost/dataset.py | 9 ++++-----
mqboost/optimize.py | 13 ++++++-------
3 files changed, 25 insertions(+), 12 deletions(-)
diff --git a/README.md b/README.md
index 32a0c77..40974f3 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,21 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/mqboost/dataset.py b/mqboost/dataset.py
index 4e85a21..13b757f 100644
--- a/mqboost/dataset.py
+++ b/mqboost/dataset.py
@@ -21,17 +21,16 @@ class MQDataset:
It supports both LightGBM and XGBoost models, handling data preparation, validation, and conversion for training and prediction.
Attributes:
- alphas (list[float]):
+ alphas (list[float] | float):
List of quantile levels.
Must be in ascending order and contain no duplicates.
- data (pd.DataFrame): The input features.
- label (pd.DataFrame): The target labels (if provided).
- model (ModelName): The model type (LightGBM or XGBoost).
+ data (pd.DataFrame | pd.Series | np.ndarray): The input features.
+ label (pd.Series | np.ndarray): The target labels (if provided).
+ model (str): The model type (LightGBM or XGBoost).
Properties:
train_dtype: Returns the data type function for training data.
predict_dtype: Returns the data type function for prediction data.
- model: Returns the model type.
columns: Returns the column names of the input features.
nrow: Returns the number of rows in the dataset.
data: Returns the input features.
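With the widened attribute types above, constructing a dataset could look like the following. A minimal sketch, assuming `MQDataset` is exported at the package root and accepts these names as keyword arguments; the toy data is illustrative only.

```python
import numpy as np
import pandas as pd

from mqboost import MQDataset  # assumed top-level export

# Toy inputs: data may be a DataFrame, Series, or ndarray; label a Series or ndarray.
X = pd.DataFrame(np.random.rand(100, 2), columns=["x1", "x2"])
y = np.random.rand(100)

# alphas may be a single float or an ascending, duplicate-free list of floats,
# and model is now documented as a plain string rather than a ModelName enum.
ds_multi = MQDataset(alphas=[0.1, 0.5, 0.9], data=X, label=y, model="lightgbm")
ds_single = MQDataset(alphas=0.5, data=X, label=y, model="xgboost")

print(ds_multi.nrow, ds_multi.columns)  # properties listed in the docstring
```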
diff --git a/mqboost/optimize.py b/mqboost/optimize.py
index e3882d4..c8cc416 100644
--- a/mqboost/optimize.py
+++ b/mqboost/optimize.py
@@ -73,7 +73,6 @@ class MQOptimizer:
The objective function for the quantile regression ('check', 'huber', or 'phuber'). Default is 'check'.
delta (float): Delta parameter for the 'huber' objective function. Default is 0.01.
epsilon (float): Epsilon parameter for the 'phuber' objective function. Default is 1e-5.
- get_params (Callable): Function to get hyperparameters for the model.
Methods:
optimize_params(dataset, n_trials, get_params_func, valid_set):
@@ -116,20 +115,20 @@ def optimize_params(
n_trials (int): The number of trials for the hyperparameter optimization.
get_params_func (Callable, optional): A custom function to get the parameters for the model.
For example,
- def get_params(trial: Trial, model: ModelName):
+ def get_params(trial: Trial):
return {
- "learning_rate": trial.suggest_float("learning_rate", 1e-2, 1.0, log=True),
+ "learning_rate": trial.suggest_float("learning_rate", 1e-2, 1.0),
"max_depth": trial.suggest_int("max_depth", 1, 10),
- "lambda_l1": trial.suggest_float("lambda_l1", 1e-8, 10.0, log=True),
- "lambda_l2": trial.suggest_float("lambda_l2", 1e-8, 10.0, log=True),
+ "lambda_l1": trial.suggest_float("lambda_l1", 1e-8, 10.0),
+ "lambda_l2": trial.suggest_float("lambda_l2", 1e-8, 10.0),
"num_leaves": trial.suggest_int("num_leaves", 2, 256),
"feature_fraction": trial.suggest_float("feature_fraction", 0.4, 1.0),
"bagging_fraction": trial.suggest_float("bagging_fraction", 0.4, 1.0),
"bagging_freq": trial.suggest_int("bagging_freq", 1, 7),
}
- valid_set (Optional[MQDataset], optional): The validation dataset. Defaults to None.
+ valid_set (MQDataset, optional): The validation dataset. Defaults to None.
Returns:
- Dict[str, Any]: The best hyperparameters found by the optimization process.
+ dict[str, Any]: The best hyperparameters found by the optimization process.
"""
self._dataset = dataset
self._MQObj = MQObjective(
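Read together, the updated docstring suggests a call pattern like the one below. A minimal sketch, assuming `MQOptimizer` can be constructed with its defaults and that both classes are top-level exports; the search space simply echoes the docstring's example.

```python
import numpy as np
import pandas as pd
from optuna import Trial

from mqboost import MQDataset, MQOptimizer  # assumed top-level exports

X = pd.DataFrame(np.random.rand(100, 2), columns=["x1", "x2"])
y = pd.Series(np.random.rand(100))
train = MQDataset(alphas=[0.1, 0.5, 0.9], data=X, label=y, model="lightgbm")

def get_params(trial: Trial) -> dict:
    # Per the updated docstring, get_params_func receives only the trial;
    # the former `model: ModelName` argument is gone.
    return {
        "learning_rate": trial.suggest_float("learning_rate", 1e-2, 1.0),
        "max_depth": trial.suggest_int("max_depth", 1, 10),
        "num_leaves": trial.suggest_int("num_leaves", 2, 256),
    }

optimizer = MQOptimizer()  # assumed: defaults to the 'check' objective
best_params = optimizer.optimize_params(
    dataset=train, n_trials=20, get_params_func=get_params
)
print(best_params)  # dict[str, Any] of the best hyperparameters found
```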
From 9af66e506a369a6997ec1d9bb061a29ecba62980 Mon Sep 17 00:00:00 2001
From: RektPunk
Date: Tue, 10 Sep 2024 21:26:55 +0900
Subject: [PATCH 2/2] remove p tag
---
README.md | 5 -----
1 file changed, 5 deletions(-)
diff --git a/README.md b/README.md
index 40974f3..6980308 100644
--- a/README.md
+++ b/README.md
@@ -20,11 +20,6 @@
-
-
-
**MQBoost** introduces an advanced model for estimating multiple quantiles while ensuring the non-crossing condition (monotone quantile condition). This model harnesses the capabilities of both [LightGBM](https://github.com/microsoft/LightGBM) and [XGBoost](https://github.com/dmlc/xgboost), two leading gradient boosting frameworks.
By leveraging the hyperparameter optimization capabilities of [Optuna](https://github.com/optuna/optuna), the model achieves strong performance: Optuna's search algorithms fine-tune the hyperparameters so that training remains both effective and efficient.
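For context, the pieces touched by these patches compose into an end-to-end flow along these lines. A hedged sketch, not a verbatim README example: it assumes the package also ships a regressor class (called `MQRegressor` here) with `fit`/`predict` methods, none of which these patches touch.

```python
import numpy as np
import pandas as pd

from mqboost import MQDataset, MQOptimizer, MQRegressor  # MQRegressor is assumed

alphas = [0.1, 0.5, 0.9]
X = pd.DataFrame(np.random.rand(200, 2), columns=["x1", "x2"])
y = pd.Series(np.random.rand(200))

train = MQDataset(alphas=alphas, data=X, label=y, model="lightgbm")

optimizer = MQOptimizer()
best_params = optimizer.optimize_params(dataset=train, n_trials=10)

model = MQRegressor(**best_params)    # assumed constructor; takes tuned params
model.fit(dataset=train)              # assumed fit signature
preds = model.predict(dataset=train)  # one prediction per alpha, non-crossing
```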