From 65a0c694fad343b7837f5a2369c421bdba14e637 Mon Sep 17 00:00:00 2001
From: glemaitre

Course presentation
"Machine learning in Python with scikit-learn MOOC"
,
- is available starting on October 18, 2022 and will last for 3 months. Enroll for
- the full MOOC experience (quizz solutions, executable notebooks, discussion
+ is available starting on November 8th, 2023 and will remain open in self-paced mode.
+ Enroll for the full MOOC experience (quiz solutions, executable notebooks, discussion
forum, etc ...) !
The MOOC is free and the platform does not use the student data for any other purpose
@@ -742,7 +742,7 @@
MOOC material: DOI 10.5281/zenodo.7220306.
The following repository includes the notebooks, exercises and solutions to the
-exercises (but not the quizz solutions ;):
+exercises (but not the quizzes’ solutions ;):

The MOOC material is also published as a static website at:
diff --git a/searchindex.js b/searchindex.js
index 940a31ee4..500a78713 100644
--- a/searchindex.js
+++ b/searchindex.js
@@ -1 +1 @@
94, 95, 97, 99, 102, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 155, 157, 159, 160, 161, 162, 163, 164], "tune": [2, 12, 16, 25, 34, 39, 42, 43, 46, 92, 96, 97, 102, 103, 111, 118, 119, 120, 125, 127, 131, 132, 137, 149, 153, 155, 156, 171, 177, 182, 185], "maxim": [2, 46, 94, 96, 133, 147, 148, 149, 150, 154, 155, 156, 157, 175, 182], "involv": [2, 5, 73, 98, 101, 151], "grid": [2, 96, 105, 114, 117, 118, 123, 133, 149, 152, 154, 156, 161, 165, 175, 177, 178, 179, 183, 185], "search": [2, 46, 95, 96, 114, 116, 117, 118, 119, 120, 123, 133, 138, 148, 149, 152, 155, 156, 161, 165, 175, 177, 178, 179, 182, 183, 185], "random": [2, 10, 12, 13, 14, 16, 17, 36, 46, 50, 72, 79, 94, 98, 99, 100, 101, 105, 107, 108, 110, 112, 113, 115, 116, 118, 121, 122, 124, 125, 126, 127, 132, 139, 149, 150, 153, 156, 165, 173, 178, 179, 183], "further": [2, 38, 73, 94, 95, 132, 139, 140, 152, 161, 163], "read": [2, 5, 73, 94, 105, 108, 145, 185], "post": [2, 48, 68, 171], "machin": [2, 22, 28, 33, 35, 39, 55, 57, 63, 68, 71, 73, 79, 80, 81, 83, 84, 85, 86, 88, 90, 92, 93, 96, 97, 98, 99, 100, 101, 110, 119, 124, 125, 126, 127, 132, 133, 136, 138, 139, 141, 142, 145, 150, 154], "mooc": [2, 73, 80, 90, 91, 92, 95, 97, 101, 102, 103, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164], "refer": [2, 13, 22, 33, 39, 57, 71, 72, 73, 80, 81, 86, 88, 91, 92, 93, 94, 95, 97, 98, 101, 102, 107, 109, 110, 111, 112, 113, 115, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 151, 157, 159, 160, 161, 162, 163, 164, 171, 183], "process": [2, 5, 35, 57, 65, 71, 72, 73, 85, 100, 110, 116, 124, 125, 127, 133, 147, 151, 152, 182], "make": [2, 5, 23, 24, 28, 43, 46, 50, 55, 59, 60, 70, 72, 73, 76, 77, 79, 81, 82, 84, 85, 86, 88, 89, 90, 91, 92, 94, 96, 97, 98, 100, 101, 102, 104, 105, 107, 108, 109, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 124, 125, 126, 127, 129, 130, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 147, 148, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172], "appli": [2, 5, 14, 15, 16, 17, 23, 25, 26, 28, 38, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 81, 84, 85, 90, 99, 102, 104, 105, 107, 132, 140, 147, 150, 151, 153, 156, 161, 163, 172, 173, 175, 179, 181, 185], "unlabel": 2, "word": [2, 28, 138, 160, 164, 179], "equival": [2, 17, 28, 46, 72, 73, 80, 92, 97, 101, 117, 131, 137, 139, 141, 142, 156, 177], "unseen": [2, 65, 73, 99, 100, 117, 157], "notion": 2, "out": [2, 5, 24, 72, 73, 76, 79, 80, 84, 93, 95, 98, 100, 101, 108, 110, 113, 114, 117, 118, 122, 123, 127, 131, 133, 136, 137, 142, 150, 152, 159, 160, 163, 164], "ti": 2, "definit": [2, 139, 151], "distribut": [2, 5, 42, 73, 74, 75, 81, 93, 94, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 116, 132, 133, 141, 145, 147, 149, 152, 154, 156, 158, 162, 179], "condit": [2, 5, 108, 133, 141], "check": [2, 5, 28, 46, 59, 73, 76, 79, 80, 81, 83, 84, 85, 92, 94, 95, 97, 99, 100, 101, 102, 103, 105, 107, 109, 110, 111, 112, 114, 115, 116, 118, 119, 120, 121, 123, 124, 125, 126, 127, 129, 133, 135, 138, 139, 140, 142, 143, 146, 150, 152, 153, 157, 158, 160, 161, 162, 164, 170, 177], "wikipedia": [2, 57, 110], "articl": [2, 5, 57, 110], "finish": [2, 133], "_": [2, 73, 75, 80, 81, 85, 91, 94, 
95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 120, 121, 122, 123, 125, 126, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 151, 153, 157, 158, 161, 162, 163, 164], "end": [2, 81, 84, 91, 92, 94, 95, 97, 98, 99, 101, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 155, 157, 159, 160, 161, 162, 163, 164], "thei": [2, 5, 28, 46, 55, 65, 73, 77, 79, 81, 82, 84, 85, 89, 98, 99, 102, 104, 107, 108, 110, 116, 117, 119, 132, 133, 139, 142, 145, 151, 157, 162, 164, 171, 177, 182], "avail": [2, 5, 28, 35, 48, 72, 73, 74, 75, 76, 80, 90, 94, 95, 104, 106, 107, 112, 121, 125, 127, 133, 138, 140, 142, 151, 154, 177], "after": [2, 17, 27, 46, 76, 84, 101, 104, 110, 113, 116, 117, 120, 122, 138, 151, 155, 156], "been": [2, 73, 76, 79, 84, 91, 99, 101, 108, 109, 116, 122, 125, 135, 141, 151, 152, 157], "slope": [2, 28, 105, 140], "intercept": [2, 39, 128, 129, 130, 134, 135, 136, 137, 138, 139, 140, 141, 157], "one": [2, 5, 13, 17, 24, 36, 41, 42, 46, 57, 65, 68, 72, 73, 75, 76, 78, 79, 80, 81, 83, 84, 85, 87, 89, 92, 93, 94, 96, 97, 98, 99, 101, 103, 105, 107, 108, 109, 115, 116, 117, 119, 126, 130, 132, 133, 135, 136, 138, 139, 141, 142, 143, 145, 146, 150, 151, 152, 156, 157, 158, 161, 163, 173, 185], "section": [2, 76, 79, 84, 85, 91, 92, 94, 95, 97, 101, 102, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 154, 157, 159, 160, 161, 162, 163, 164], "about": [2, 12, 16, 21, 23, 28, 32, 35, 38, 55, 59, 63, 70, 71, 73, 76, 80, 81, 84, 93, 95, 98, 101, 102, 104, 106, 108, 115, 118, 131, 132, 137, 141, 145, 150, 158, 162, 170, 179, 182, 185], "also": [2, 12, 21, 24, 26, 28, 32, 35, 43, 57, 59, 73, 79, 80, 81, 84, 85, 86, 87, 88, 89, 92, 95, 96, 97, 100, 101, 102, 103, 104, 105, 108, 109, 110, 113, 114, 116, 117, 119, 122, 123, 125, 130, 132, 133, 136, 137, 139, 140, 141, 142, 145, 151, 152, 153, 154, 156, 157, 163], "python": [2, 5, 28, 35, 65, 70, 73, 76, 86, 88, 101, 110, 116, 133, 143, 146], "pass": [2, 17, 28, 46, 59, 65, 72, 76, 84, 85, 86, 88, 92, 96, 97, 101, 110, 140, 143, 144, 146, 147, 149, 150, 152, 156, 157, 163, 185], "anoth": [2, 5, 17, 57, 72, 73, 76, 96, 98, 100, 110, 119, 125, 126, 128, 133, 134, 139, 142, 145, 161], "includ": [2, 28, 33, 35, 63, 84, 96, 104, 125, 126, 129, 130, 133, 135, 136, 137, 139, 151, 174], "gridsearchcv": [2, 5, 96, 118, 123, 149, 150, 152, 154, 156, 161, 177, 179, 185], "someth": [2, 89, 151], "occur": [2, 84, 133], "your": [2, 5, 33, 38, 59, 72, 73, 74, 77, 78, 82, 84, 85, 86, 87, 88, 89, 92, 93, 97, 99, 111, 112, 113, 114, 117, 124, 127, 128, 129, 130, 131, 135, 136, 143, 144, 148, 149, 155, 159, 160, 164, 177, 179], "stick": 2, "too": [2, 5, 60, 80, 90, 96, 101, 102, 113, 116, 117, 119, 122, 132, 133, 135, 137, 150, 156, 161, 179], "so": [2, 5, 13, 28, 46, 65, 73, 75, 81, 83, 84, 86, 88, 91, 92, 97, 98, 99, 100, 101, 104, 105, 108, 109, 114, 115, 117, 123, 129, 132, 133, 135, 137, 138, 139, 142, 143, 146, 151, 152, 156, 163, 177], "up": [2, 5, 9, 34, 43, 73, 76, 79, 84, 94, 96, 98, 101, 103, 107, 108, 125, 128, 134, 139, 140, 142, 150, 162, 165, 182], "nois": [2, 46, 52, 57, 102, 110, 115, 132, 137, 139], "rather": [2, 28, 33, 76, 79, 105, 106, 141, 142, 147, 154, 156], "than": [2, 5, 17, 26, 27, 28, 33, 35, 42, 46, 52, 59, 72, 73, 76, 79, 80, 81, 82, 83, 84, 85, 88, 89, 91, 93, 94, 96, 97, 98, 100, 101, 102, 104, 105, 
108, 109, 110, 116, 117, 118, 119, 122, 124, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 141, 142, 145, 147, 149, 150, 152, 153, 154, 156, 157, 160, 161, 164, 174, 177, 179, 185], "relev": [2, 59, 79, 94, 108, 142], "pattern": [2, 22, 73, 94, 124, 127, 143, 146, 152], "tell": [2, 73, 102, 108], "great": [2, 5, 48], "poorli": [2, 88], "real": [2, 48, 73, 79, 80, 84, 85, 99, 101, 128, 134, 142, 145, 162], "world": [2, 162], "fit_predict": 2, "kneighborsclassifi": [2, 59, 77, 80, 82, 185], "decisiontreeregressor": [2, 17, 91, 95, 100, 101, 102, 110, 111, 115, 118, 119, 120, 139, 161, 162, 164, 177], "One": [2, 48, 59, 68, 72, 76, 77, 82, 84, 99, 100, 106, 108, 116, 142, 145, 152], "focu": [2, 17, 73, 79, 101, 104, 107, 109, 115, 133, 142, 145, 150, 151, 157], "were": [2, 28, 65, 76, 79, 85, 94, 101, 102, 106, 115, 124, 127, 128, 134, 142, 154, 156, 161, 164], "If": [2, 5, 27, 28, 36, 41, 52, 65, 73, 76, 80, 84, 91, 92, 94, 95, 96, 97, 100, 101, 102, 107, 108, 109, 111, 112, 113, 116, 117, 118, 119, 120, 121, 122, 125, 128, 129, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 151, 152, 156, 157, 159, 160, 161, 162, 163, 164, 175, 185], "do": [2, 5, 16, 17, 28, 57, 65, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 88, 90, 93, 96, 98, 100, 101, 102, 104, 107, 113, 116, 117, 119, 122, 124, 125, 127, 129, 132, 133, 135, 138, 139, 142, 143, 146, 150, 151, 152, 153, 162, 179, 181, 185], "1d": [2, 28, 139], "5": [2, 3, 17, 28, 59, 65, 72, 73, 75, 76, 80, 81, 82, 83, 84, 85, 90, 91, 94, 95, 96, 97, 98, 99, 101, 102, 104, 105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 126, 129, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 145, 147, 150, 152, 153, 154, 156, 157, 158, 161, 162, 163, 164, 165, 181, 182, 185], "someon": [2, 73], "come": [2, 5, 28, 55, 73, 79, 115, 118, 125, 128, 134, 140, 151, 152, 162], "doe": [2, 5, 17, 23, 28, 35, 46, 73, 76, 81, 84, 85, 87, 88, 89, 91, 94, 95, 96, 97, 99, 100, 105, 108, 117, 120, 131, 132, 133, 137, 141, 145, 147, 150, 154, 156, 157, 162, 179, 183], "15": [2, 3, 73, 79, 81, 84, 91, 102, 104, 114, 116, 117, 120, 123, 142, 154, 156, 157, 163, 177], "continu": [2, 5, 36, 41, 42, 63, 73, 79, 101, 103, 104, 108, 140, 141, 142, 145, 147, 158, 161], "price": [2, 48, 72, 90, 91, 101, 104, 107, 108, 144, 145, 147], "descript": [2, 28, 48, 72, 73, 77, 82, 91, 92, 95, 97, 101, 102, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164, 165], "room": [2, 48, 101, 107, 108], "surfac": [2, 28], "locat": [2, 74, 75, 91, 104, 105, 107, 141, 153], "ag": [2, 73, 76, 78, 79, 80, 81, 83, 84, 85, 101, 103, 107, 108, 130, 133, 136, 150, 151, 154], "mri": 2, "scan": [2, 5, 151], "want": [2, 17, 73, 74, 75, 79, 80, 84, 87, 89, 90, 91, 92, 94, 95, 97, 99, 100, 101, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 125, 126, 128, 129, 131, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 150, 151, 152, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "tree": [2, 5, 9, 12, 14, 15, 16, 17, 57, 73, 81, 84, 85, 87, 89, 91, 95, 100, 101, 102, 108, 109, 110, 112, 113, 114, 116, 118, 119, 120, 121, 122, 123, 126, 132, 139, 143, 146, 148, 150, 154, 155, 158, 159, 160, 163, 164, 170, 171, 172, 173, 174, 175, 177], "piecewis": [2, 132, 162, 174], "constant": [2, 23, 83, 108, 129, 132, 135, 137, 139, 162, 174], "To": [2, 7, 8, 28, 36, 45, 46, 47, 49, 51, 
53, 61, 66, 72, 73, 79, 80, 81, 85, 86, 88, 92, 94, 95, 96, 97, 99, 100, 101, 102, 108, 109, 110, 113, 116, 117, 122, 125, 129, 132, 133, 135, 137, 138, 139, 142, 145, 154, 156, 162, 166, 177], "given": [2, 5, 17, 28, 41, 48, 57, 65, 79, 81, 84, 93, 96, 98, 99, 101, 104, 106, 107, 108, 110, 115, 119, 126, 131, 132, 133, 135, 136, 137, 138, 140, 141, 142, 147, 152, 154, 163, 172, 179, 181, 182], "output": [2, 17, 41, 73, 76, 81, 84, 85, 100, 101, 105, 108, 110, 128, 129, 132, 134, 135, 141, 142, 147, 163], "correspond": [2, 17, 28, 46, 73, 76, 79, 84, 85, 86, 88, 94, 96, 101, 102, 104, 105, 106, 107, 108, 110, 112, 121, 135, 140, 141, 142, 150, 151, 153, 154, 162, 174, 177], "ridg": [2, 39, 43, 46, 107, 108, 110, 131, 133, 137], "order": [2, 5, 17, 28, 39, 46, 68, 73, 81, 85, 88, 89, 90, 94, 99, 101, 104, 114, 119, 123, 133, 140, 150, 153, 154, 185], "shrink": [2, 43, 133, 137], "constrain": [2, 39, 43, 60, 102, 132], "toward": [2, 43, 108, 125, 133, 137, 141], "zero": [2, 27, 28, 41, 42, 43, 52, 55, 73, 84, 102, 108, 133, 137, 139, 141], "2d": [2, 132, 139], "singl": [2, 14, 15, 17, 23, 24, 26, 27, 28, 34, 41, 42, 43, 46, 48, 52, 59, 63, 65, 68, 72, 73, 76, 79, 81, 84, 85, 90, 93, 96, 98, 99, 100, 101, 108, 110, 115, 118, 119, 120, 125, 126, 128, 129, 132, 134, 135, 139, 142, 143, 145, 146, 150, 151, 152, 157, 161, 163, 172, 173, 174, 175, 177, 179, 181, 185], "orient": [2, 108, 131, 137, 163], "clf": 2, "give": [2, 5, 13, 21, 23, 28, 32, 52, 55, 57, 70, 73, 76, 81, 83, 84, 85, 94, 95, 96, 99, 101, 105, 106, 108, 109, 110, 115, 116, 117, 118, 119, 125, 131, 133, 137, 138, 139, 141, 142, 145, 152, 156, 157, 163], "concret": [2, 28, 57], "graphic": [2, 65, 85, 99, 107, 140], "plot": [2, 17, 28, 46, 59, 65, 73, 74, 75, 81, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 121, 123, 125, 126, 128, 131, 132, 133, 134, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 164, 179], "compos": [2, 72, 73, 80, 84, 85, 86, 87, 88, 89, 90, 93, 98, 104, 119, 130, 136, 145, 148, 150, 152, 154, 155, 161], "sinc": [2, 73, 76, 81, 92, 96, 97, 98, 99, 101, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 125, 133, 136, 138, 141, 142, 145, 150, 157, 158, 162, 163], "potenti": [2, 43, 48, 73, 81, 85, 95, 101, 102, 116, 138, 142, 152, 181], "choic": [2, 28, 35, 43, 52, 55, 57, 84, 97, 98, 101, 105, 107, 108, 110, 133, 138, 147, 150, 154, 156, 165, 185], "circl": [2, 73, 107, 110, 132], "vs": [2, 81, 84, 131, 137, 142, 163], "squar": [2, 27, 57, 105, 133, 134, 138, 139, 145, 147], "boil": 2, "down": [2, 5, 117], "fact": [2, 17, 81, 83, 89, 103, 105, 115, 133, 154, 164, 177], "exactli": [2, 43, 55, 59, 79, 89, 94, 185], "know": [2, 5, 85, 92, 97, 101, 102, 104, 107, 110, 115, 127, 133, 139, 142, 150], "frame": [2, 104, 105, 106, 107], "scienc": [2, 5, 35, 94, 105], "solv": [2, 5, 28, 43, 59, 73, 79, 85, 94, 97, 99, 101, 105, 106, 133, 138, 139, 140, 141, 145, 162, 185], "might": [2, 5, 28, 36, 48, 72, 83, 84, 85, 87, 89, 94, 95, 99, 100, 101, 102, 105, 108, 117, 134, 142, 145, 150, 154, 163], "speci": [2, 17, 74, 75, 109, 131, 137, 141, 157, 158, 159, 161, 163, 185], "commonli": [2, 73, 79, 80], "denot": 2, "eventu": 2, "ideal": [2, 101, 142, 145], "let": [2, 5, 17, 24, 42, 46, 72, 73, 76, 77, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 110, 115, 117, 125, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 142, 145, 147, 
150, 151, 154, 157, 158, 161, 162, 185], "On": [2, 5, 28, 79, 81, 82, 85, 90, 96, 101, 108, 109, 110, 117, 122, 124, 127, 132, 133, 135, 136, 137, 138, 139, 142, 145, 150, 152, 154, 157, 162, 163, 164], "figur": [2, 26, 76, 79, 99, 101, 103, 108, 109, 110, 115, 141, 152, 153, 154, 157, 163, 179], "mathemat": [2, 57, 92, 97, 132, 139, 140, 141, 145], "b": [2, 14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 128, 134, 140, 154, 156, 172, 173, 174, 175, 177, 179, 181, 185], "creat": [2, 28, 43, 46, 59, 68, 70, 71, 72, 76, 77, 79, 81, 82, 84, 85, 92, 93, 95, 97, 98, 99, 102, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 141, 142, 143, 144, 146, 147, 150, 151, 152, 154, 157, 159, 160, 162, 163, 164, 173, 177, 182], "infin": 2, "vari": [2, 17, 79, 92, 95, 97, 99, 102, 105, 108, 123, 125, 133, 138, 140, 142, 151, 156, 177], "fulfil": 2, "requir": [2, 5, 12, 17, 21, 28, 32, 35, 38, 39, 42, 46, 55, 59, 70, 72, 73, 76, 79, 81, 85, 92, 97, 98, 105, 109, 116, 117, 132, 133, 147, 152, 154, 156, 161, 170, 177, 182, 185], "minim": [2, 5, 39, 57, 63, 91, 96, 101, 102, 137, 138, 145, 147, 157, 173], "sum": [2, 39, 41, 57, 73, 80, 82, 115, 134, 136, 141, 142], "error": [2, 5, 15, 27, 28, 39, 41, 50, 52, 55, 57, 58, 60, 73, 76, 80, 86, 88, 91, 94, 95, 96, 102, 109, 111, 112, 113, 115, 116, 117, 119, 120, 121, 122, 128, 129, 133, 134, 135, 138, 139, 142, 144, 145, 147, 148, 155, 165, 173], "red": [2, 26, 76, 101, 103, 109, 110, 115, 131, 132, 136, 137, 141, 152, 161], "best": [2, 12, 17, 21, 32, 38, 52, 57, 59, 73, 90, 96, 97, 99, 102, 111, 113, 114, 116, 117, 119, 120, 122, 123, 128, 133, 134, 137, 138, 139, 142, 145, 148, 149, 150, 151, 152, 154, 155, 156, 157, 170, 179, 182, 183, 185], "possibl": [2, 5, 14, 28, 35, 41, 42, 57, 59, 63, 68, 73, 81, 84, 91, 95, 96, 99, 101, 102, 103, 108, 110, 116, 117, 119, 127, 128, 129, 132, 133, 134, 135, 137, 139, 141, 145, 147, 149, 150, 152, 153, 156, 159, 160, 162, 163, 164, 179, 185], "abstract": [2, 105], "manner": [2, 13, 28, 84, 94, 133, 147], "state": [2, 5, 13, 59, 73, 79, 80, 81, 84, 85, 107, 136, 150, 154], "jockei": 2, "wheel": 2, "i": [2, 12, 17, 19, 21, 28, 46, 70, 71, 72, 73, 76, 79, 80, 81, 83, 84, 85, 88, 89, 95, 96, 99, 101, 103, 108, 110, 115, 116, 117, 118, 128, 129, 132, 133, 134, 135, 138, 141, 142, 145, 147, 150, 156, 157, 165, 179, 185], "support": [2, 5, 81, 87, 89, 92, 97, 132, 136, 139, 143, 146, 163], "standardscal": [2, 28, 46, 59, 65, 72, 76, 81, 85, 87, 89, 90, 92, 97, 98, 99, 107, 108, 130, 131, 132, 136, 137, 141, 149, 151, 156, 177, 179, 181, 185], "columntransform": [2, 71, 85, 87, 89, 90, 132, 136, 148, 150, 152, 154, 155], "enough": [2, 5, 88, 89, 98, 101, 102, 115, 117, 137, 145, 151, 157, 161, 163, 177], "flexibl": [2, 5, 52, 55, 57, 60, 92, 97, 102, 119, 157], "opposit": 2, "cluster": [2, 101, 103, 172], "whose": [2, 80, 116, 151], "group": [2, 5, 19, 24, 28, 73, 99, 100, 101, 107, 130, 136, 165], "subset": [2, 16, 41, 43, 46, 70, 73, 76, 79, 80, 85, 90, 101, 105, 107, 117, 119, 122, 124, 125, 126, 127, 148, 155, 157, 158, 161, 177], "them": [2, 5, 12, 59, 73, 75, 76, 81, 84, 85, 93, 96, 98, 101, 108, 110, 112, 116, 117, 118, 121, 129, 132, 133, 135, 139, 141, 142, 145, 150, 151, 152, 156, 160, 163, 164, 177, 185], "broad": 2, "topic": [2, 101, 107], "custom": [2, 139], "commerc": 2, "websit": [2, 35, 48, 77, 82], "although": 2, "mention": [2, 48, 76, 92, 97, 100, 104, 109, 116, 
118, 131, 132, 133, 137, 138, 141, 142, 143, 145, 146, 151, 154, 164], "cover": [2, 49, 73, 76, 79, 84, 85, 137], "impli": [2, 161], "fix": [2, 28, 46, 52, 57, 84, 92, 97, 117, 133, 149, 150, 156, 161, 163, 177, 179, 183], "like": [2, 5, 17, 24, 28, 43, 52, 73, 79, 80, 84, 85, 86, 88, 96, 98, 101, 104, 106, 108, 109, 110, 117, 125, 133, 134, 139, 140, 141, 142, 143, 146, 147, 151], "necessari": [2, 5, 43, 73, 117, 132, 152], "subdivid": [2, 157], "select": [2, 5, 12, 14, 15, 16, 17, 21, 22, 23, 24, 25, 26, 27, 28, 32, 33, 34, 38, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 74, 75, 76, 78, 79, 81, 83, 86, 87, 88, 89, 96, 99, 101, 103, 108, 110, 115, 117, 119, 123, 124, 127, 128, 130, 134, 136, 138, 139, 141, 150, 151, 152, 153, 156, 158, 170, 172, 173, 174, 175, 177, 179, 181, 185], "final": [2, 12, 22, 38, 43, 55, 59, 70, 80, 81, 84, 85, 96, 100, 101, 105, 107, 110, 114, 115, 117, 122, 123, 124, 126, 127, 133, 137, 143, 144, 146, 147, 150, 151, 152, 160, 164, 182], "sometim": [2, 5, 57, 85, 98, 142, 145, 152, 154], "clear": [2, 73, 97, 102, 161], "mani": [2, 5, 28, 41, 59, 72, 73, 74, 75, 80, 81, 84, 85, 96, 98, 99, 101, 102, 104, 107, 110, 117, 119, 129, 130, 132, 133, 135, 136, 142, 151, 156, 185], "need": [2, 5, 21, 23, 28, 36, 39, 59, 65, 73, 77, 79, 81, 82, 84, 85, 90, 92, 96, 97, 101, 102, 105, 108, 109, 110, 115, 117, 118, 119, 120, 130, 132, 133, 136, 137, 138, 139, 143, 144, 146, 147, 150, 151, 152, 154, 155, 161, 175, 177], "criteria": [2, 117], "ml": [2, 5, 94], "cheatsheet": 2, "readthedoc": 2, "io": [2, 35], "en": 2, "latest": [2, 27], "googl": 2, "develop": [2, 5, 35, 70, 73, 86, 88, 105, 129, 135, 139], "com": [2, 5, 35], "advanc": [2, 5, 35, 70, 87, 89], "relat": [2, 13, 17, 22, 33, 39, 55, 57, 71, 73, 85, 103, 137, 139, 142, 171, 182, 183], "terminolog": 2, "modifi": [3, 5, 116, 118, 139], "run": [3, 5, 17, 43, 65, 77, 82, 91, 95, 96, 108, 114, 123, 133, 153, 154, 177, 179, 185], "statu": [3, 24, 73, 79, 84, 85, 136, 150, 152, 154], "python_script": 3, "01_tabular_data_explor": 3, "2024": 3, "04": [3, 11, 29, 44, 67, 81, 109, 117, 133, 163, 165, 168], "26": [3, 91, 104, 105, 120], "13": [3, 73, 79, 81, 91, 94, 104, 105, 106, 117, 120, 132, 145, 156], "19": [3, 75, 84, 91, 104, 105, 116, 117, 120, 129, 133, 135, 138, 143, 146, 153, 156, 158], "cach": 3, "7": [3, 17, 46, 72, 73, 75, 81, 82, 84, 91, 94, 97, 98, 99, 101, 104, 105, 107, 108, 116, 117, 120, 129, 133, 135, 142, 147, 150, 153, 154, 156, 158, 161, 163, 165, 177, 185], "52": [3, 101, 104, 105, 107, 108, 120, 125, 130, 136, 157], "01_tabular_data_exploration_ex_01": 3, "76": [3, 83, 97, 104, 106, 142], "01_tabular_data_exploration_sol_01": 3, "20": [3, 17, 28, 59, 73, 75, 84, 85, 90, 91, 96, 101, 102, 103, 104, 105, 106, 107, 113, 117, 118, 120, 122, 129, 130, 133, 135, 136, 142, 147, 148, 149, 155, 156, 158], "82": [3, 78, 80, 83, 100, 116, 117], "02_numerical_pipeline_cross_valid": 3, "02_numerical_pipeline_ex_00": 3, "75": [3, 59, 79, 81, 84, 90, 91, 98, 100, 104, 107], "02_numerical_pipeline_ex_01": 3, "68": [3, 75, 90, 104, 146, 154, 156], "02_numerical_pipeline_hands_on": 3, "92": [3, 80, 84, 91, 145, 153, 185], "02_numerical_pipeline_introduct": 3, "77": [3, 104, 106, 116, 142], "02_numerical_pipeline_sc": 3, "23": [3, 84, 91, 101, 104, 105, 106, 107, 108, 116, 163], "02_numerical_pipeline_sol_00": 3, "21": [3, 84, 91, 101, 104, 105, 107, 108, 147, 154, 156], "02_numerical_pipeline_sol_01": 3, "57": [3, 104, 105, 120], "03_categorical_pipelin": 3, "95": [3, 100, 107, 154], 
"03_categorical_pipeline_column_transform": 3, "55": [3, 95, 100, 104, 105, 117], "03_categorical_pipeline_ex_01": 3, "71": [3, 104, 162], "03_categorical_pipeline_ex_02": 3, "97": [3, 153, 163, 185], "03_categorical_pipeline_sol_01": 3, "03_categorical_pipeline_sol_02": 3, "27": [3, 73, 84, 91, 104, 105, 120, 130, 136], "03_categorical_pipeline_visu": 3, "79": [3, 72, 104], "cross_validation_baselin": 3, "9": [3, 28, 72, 73, 81, 82, 90, 91, 94, 97, 98, 99, 101, 104, 107, 108, 116, 117, 118, 120, 133, 135, 143, 145, 146, 147, 150, 154, 156, 158, 185], "33": [3, 101, 104, 107, 117, 153], "cross_validation_ex_01": 3, "cross_validation_ex_02": 3, "31": [3, 104, 114, 117, 123, 135, 142, 154], "cross_validation_group": 3, "6": [3, 12, 21, 38, 46, 70, 72, 73, 75, 81, 82, 90, 91, 94, 97, 98, 99, 101, 104, 105, 107, 108, 115, 116, 117, 118, 120, 129, 131, 133, 135, 137, 142, 145, 146, 147, 150, 153, 154, 156, 157, 158, 161, 162, 165, 177], "07": [3, 100, 108, 133, 150], "cross_validation_learning_curv": 3, "12": [3, 73, 81, 90, 91, 100, 103, 104, 105, 106, 107, 115, 117, 120, 142, 150, 156, 163], "49": [3, 84, 104, 116, 117, 125, 138, 154], "cross_validation_nest": 3, "22": [3, 91, 101, 104, 105, 107, 108, 130, 136, 145, 147, 153], "48": [3, 79, 80, 81, 91, 104, 120, 153], "cross_validation_sol_01": 3, "cross_validation_sol_02": 3, "cross_validation_stratif": 3, "08": [3, 28, 105, 133], "cross_validation_tim": 3, "cross_validation_train_test": 3, "cross_validation_validation_curv": 3, "18": [3, 28, 35, 73, 75, 79, 91, 104, 105, 117, 120, 129, 130, 135, 136, 139, 150, 151, 153, 154, 156, 158], "38": [3, 73, 79, 81, 84, 104, 118, 120, 130, 136, 150, 151, 154], "datasets_adult_censu": 3, "54": [3, 104, 105, 117, 120], "datasets_ames_h": 3, "29": [3, 91, 104, 108, 116, 118, 120, 125, 139, 154], "datasets_bike_rid": 3, "42": [3, 72, 79, 81, 83, 85, 104, 117, 120, 127, 132, 148, 149, 150, 152, 154, 155, 156], "datasets_blood_transfus": 3, "datasets_california_h": 3, "45": [3, 73, 80, 81, 84, 91, 101, 102, 104, 106, 116, 120, 134, 140, 141, 157], "dev_features_import": 3, "24": [3, 91, 101, 104, 105, 106, 107, 108, 142, 147, 154], "53": [3, 104, 105, 116, 125, 154], "ensemble_adaboost": 3, "ensemble_bag": 3, "ensemble_ex_01": 3, "66": [3, 90, 104, 105], "ensemble_ex_02": 3, "43": [3, 85, 91, 94, 101, 104, 105, 116, 118, 120], "ensemble_ex_03": 3, "ensemble_ex_04": 3, "ensemble_gradient_boost": 3, "25": [3, 59, 73, 79, 80, 81, 85, 91, 101, 102, 104, 105, 107, 108, 117, 120, 130, 134, 136, 140, 150, 151, 154], "63": [3, 104, 110, 154], "ensemble_hist_gradient_boost": 3, "41": [3, 80, 100, 101, 104, 107, 108, 117], "35": [3, 73, 104, 106, 117, 122, 136], "ensemble_hyperparamet": 3, "58": [3, 104, 105, 130, 135, 136, 150], "44": [3, 73, 79, 91, 101, 104, 120, 130, 135, 136, 150, 151, 154], "ensemble_introduct": 3, "28": [3, 73, 79, 81, 84, 91, 101, 104, 105, 106, 116, 120, 130, 136, 150, 151, 154], "ensemble_random_forest": 3, "ensemble_sol_01": 3, "64": [3, 94, 104, 105, 154], "ensemble_sol_02": 3, "ensemble_sol_03": 3, "62": [3, 90, 104, 118], "16": [3, 73, 81, 84, 89, 91, 94, 104, 106, 116, 117, 118, 120, 147, 153, 156, 157, 163], "ensemble_sol_04": 3, "30": [3, 73, 79, 84, 85, 91, 92, 94, 95, 97, 102, 104, 106, 107, 110, 116, 117, 118, 120, 130, 135, 136, 148, 150, 151, 152, 154, 155, 161, 163, 164], "40": [3, 28, 73, 75, 79, 80, 81, 85, 101, 104, 117, 129, 130, 134, 135, 136, 140, 149, 150, 151, 154, 156, 158], "feature_selection_ex_01": 3, "34": [3, 104, 107, 153, 154], "feature_selection_introduct": 
3, "78": [3, 104, 142, 157], "feature_selection_limitation_model": 3, "feature_selection_sol_01": 3, "linear_models_ex_01": 3, "linear_models_ex_02": 3, "linear_models_ex_03": 3, "linear_models_ex_04": 3, "linear_models_feature_engineering_classif": 3, "06": 3, "linear_models_regular": 3, "linear_models_sol_01": 3, "32": [3, 85, 104, 117, 154], "01": [3, 10, 18, 31, 37, 56, 58, 62, 81, 96, 105, 117, 118, 130, 131, 136, 137, 150, 151, 153, 165, 167, 169, 180], "linear_models_sol_02": 3, "linear_models_sol_03": 3, "linear_models_sol_04": 3, "linear_regression_in_sklearn": 3, "linear_regression_non_linear_link": 3, "65": [3, 84, 90, 104, 118], "linear_regression_without_sklearn": 3, "logistic_regress": [3, 131, 132, 137, 141], "metrics_classif": 3, "metrics_ex_01": 3, "81": [3, 28, 72, 78, 83, 116, 117], "metrics_ex_02": 3, "metrics_regress": 3, "metrics_sol_01": 3, "metrics_sol_02": 3, "parameter_tuning_ex_02": 3, "72": [3, 104], "parameter_tuning_ex_03": 3, "8": [3, 59, 73, 75, 79, 82, 84, 90, 91, 94, 97, 98, 99, 100, 101, 103, 104, 107, 108, 110, 114, 115, 116, 117, 118, 120, 121, 123, 131, 132, 133, 136, 137, 139, 142, 147, 148, 150, 153, 154, 155, 156, 157, 162, 163, 177, 179, 185], "parameter_tuning_grid_search": 3, "parameter_tuning_manu": 3, "46": [3, 84, 91, 101, 104, 115, 116, 156], "parameter_tuning_nest": 3, "11": [3, 73, 84, 91, 104, 117, 118, 120, 133, 150, 156, 161, 177], "parameter_tuning_parallel_plot": 3, "74": [3, 104], "parameter_tuning_randomized_search": 3, "parameter_tuning_sol_02": 3, "89": [3, 81, 91, 125], "parameter_tuning_sol_03": 3, "trees_classif": 3, "trees_dataset": 3, "trees_ex_01": 3, "61": [3, 100, 104, 117, 133, 154], "trees_ex_02": 3, "trees_hyperparamet": 3, "trees_regress": 3, "17": [3, 28, 73, 75, 79, 81, 91, 101, 104, 116, 117, 120, 129, 135, 141, 145, 147, 153, 154, 156, 157, 158], "trees_sol_01": 3, "96": 3, "trees_sol_02": 3, "51": [3, 104, 185], "lot": [5, 73, 107, 108, 127, 133, 142], "materi": 5, "far": [5, 46, 91, 102, 115, 122, 126, 133], "congratul": 5, "And": [5, 100], "thank": [5, 132, 152], "everyon": 5, "instructor": 5, "staff": 5, "help": [5, 59, 65, 81, 83, 87, 89, 92, 93, 97, 98, 102, 104, 105, 106, 107, 108, 117, 124, 127, 131, 132, 133, 137, 141, 142, 145, 185], "forum": [5, 35], "student": [5, 35], "hard": [5, 72, 73, 74, 75, 124, 127, 141, 142, 147], "work": [5, 39, 59, 64, 73, 81, 84, 85, 99, 100, 105, 110, 117, 119, 126, 127, 130, 133, 136, 148, 151, 153, 155, 157, 159, 162, 163, 165, 170, 185], "summar": [5, 101, 109, 117, 119], "train": [5, 14, 15, 16, 17, 24, 25, 28, 33, 34, 36, 39, 41, 43, 46, 50, 52, 55, 57, 58, 59, 60, 65, 68, 70, 71, 73, 76, 77, 78, 81, 82, 83, 84, 85, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 105, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 165, 174, 175, 177, 182, 185], "test": [5, 17, 25, 28, 33, 34, 36, 43, 46, 52, 55, 57, 58, 59, 65, 71, 72, 76, 77, 78, 81, 82, 83, 84, 85, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 108, 110, 111, 112, 113, 114, 115, 117, 118, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 133, 135, 136, 137, 141, 142, 143, 145, 146, 148, 150, 151, 152, 154, 155, 156, 159, 162, 163, 165, 175, 177, 179, 185], "built": [5, 36, 93, 98, 108, 110, 139, 142, 161, 163, 173], "matrix": [5, 28, 81, 84, 94, 103, 119, 124, 127, 133, 139], "featur": [5, 14, 15, 16, 17, 
23, 28, 32, 33, 34, 36, 39, 41, 42, 46, 48, 57, 59, 63, 64, 65, 68, 70, 71, 72, 73, 74, 75, 76, 79, 80, 85, 86, 88, 90, 91, 93, 97, 98, 101, 103, 104, 105, 106, 107, 109, 110, 115, 116, 117, 119, 124, 127, 129, 130, 135, 136, 140, 141, 145, 150, 151, 152, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172, 173, 177, 185], "observ": [5, 27, 43, 46, 50, 52, 73, 75, 81, 83, 84, 85, 87, 89, 91, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 105, 106, 109, 110, 113, 115, 117, 119, 122, 123, 125, 132, 133, 135, 136, 137, 139, 141, 142, 145, 150, 153, 156, 157, 158, 161, 162, 164, 174], "transform": [5, 28, 39, 42, 65, 68, 80, 81, 84, 85, 90, 93, 96, 98, 101, 102, 105, 110, 116, 119, 124, 127, 129, 130, 132, 133, 135, 136, 137, 139, 145, 150, 151, 152, 153, 154, 179], "often": [5, 39, 41, 43, 57, 60, 73, 84, 85, 96, 98, 110, 137, 139, 145, 147, 152, 177, 182, 185], "typic": [5, 12, 48, 70, 73, 84, 101, 107, 117, 124, 127, 132, 141, 145, 147, 152, 154, 172], "categor": [5, 36, 46, 63, 68, 70, 71, 72, 73, 74, 75, 86, 88, 103, 104, 119, 130, 132, 133, 136, 141, 142, 150, 158, 165, 177], "variabl": [5, 23, 28, 35, 39, 46, 48, 57, 59, 63, 65, 67, 68, 72, 76, 77, 80, 81, 82, 86, 88, 91, 94, 95, 100, 101, 102, 103, 104, 106, 107, 110, 117, 119, 130, 133, 136, 140, 141, 145, 150, 152, 154, 156, 158, 165, 177, 179, 185], "inform": [5, 23, 28, 46, 72, 73, 76, 80, 84, 85, 91, 94, 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 115, 117, 118, 125, 126, 133, 139, 141, 142, 143, 146, 150, 151, 154, 156, 158], "seek": [5, 33, 117, 118, 142], "suffic": [5, 117], "But": [5, 90, 97, 100, 101, 107, 132, 133, 152, 153], "larg": [5, 17, 46, 68, 85, 96, 101, 105, 107, 113, 114, 116, 117, 122, 123, 126, 127, 131, 133, 135, 137, 145, 149, 152, 153, 154, 156, 179], "detect": 5, "underfit": [5, 12, 21, 28, 32, 38, 42, 43, 50, 52, 55, 57, 58, 59, 60, 95, 115, 117, 118, 119, 131, 132, 135, 137, 139, 157, 165, 170, 172], "multipl": [5, 84, 94, 108, 110, 115, 129, 130, 135, 136, 139, 143, 144, 146, 147, 153, 172, 173], "hyper": [5, 17, 36, 96, 98, 152, 156, 179], "control": [5, 43, 77, 81, 82, 92, 96, 97, 102, 117, 119, 125, 131, 132, 137, 139, 148, 149, 151, 153, 155, 156, 157, 161, 172, 182], "import": [5, 12, 17, 21, 28, 35, 36, 46, 57, 59, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 162, 163, 164, 165, 168, 170, 177, 179, 181, 185], "randomsearchcv": 5, "understand": [5, 12, 21, 32, 38, 55, 77, 79, 82, 84, 94, 95, 99, 102, 128, 134, 142, 161, 162, 170, 171, 182], "suit": [5, 132, 145, 171], "intuit": [5, 9, 10, 12, 13, 21, 38, 44, 55, 70, 79, 80, 84, 103, 105, 107, 109, 110, 115, 123, 131, 132, 137, 138, 139, 141, 157, 158, 162, 163, 165], "debug": 5, "build": [5, 17, 28, 46, 70, 72, 73, 80, 90, 92, 97, 110, 116, 126, 129, 132, 135, 150, 161, 165, 167], "combin": [5, 12, 13, 15, 41, 42, 65, 73, 79, 81, 85, 94, 96, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 126, 132, 133, 135, 138, 139, 147, 148, 149, 150, 152, 154, 155, 156, 157, 172, 173, 179, 182, 183], "particularli": [5, 72, 86, 88, 117], "few": [5, 73, 74, 75, 78, 79, 83, 101, 104, 106, 107, 117, 119, 129, 132, 135, 137, 152, 158], "benefit": [5, 16, 23, 32, 81, 84, 95, 115, 118, 
119, 133, 145, 165], "non": [5, 19, 28, 38, 41, 42, 43, 46, 65, 68, 73, 81, 89, 92, 96, 97, 98, 103, 104, 105, 106, 107, 108, 110, 125, 129, 133, 135, 141, 156, 157, 162, 164, 165, 171, 172, 183], "engin": [5, 39, 42, 46, 94, 105, 129, 130, 133, 135, 136, 165], "base": [5, 12, 13, 14, 15, 17, 28, 33, 35, 41, 48, 59, 73, 74, 75, 81, 87, 89, 94, 101, 103, 105, 110, 115, 118, 119, 125, 126, 129, 130, 135, 136, 137, 139, 141, 142, 147, 150, 152, 157, 158, 165, 185], "seri": [5, 91, 93, 94, 98, 100, 108, 110, 115, 137, 141, 142, 157], "threshold": [5, 14, 26, 73, 90, 107, 157, 162, 163, 173], "variou": [5, 55, 90], "attribut": [5, 36, 73, 81, 84, 94, 96, 101, 107, 108, 110, 112, 121, 131, 137, 138, 139, 141, 149, 150, 154, 156, 179, 185], "tabular": [5, 63, 70, 73, 80, 85, 165], "natur": [5, 21, 35, 80, 84, 85, 101, 105, 110, 132, 133, 154], "miss": [5, 73, 84, 94, 101, 104, 106, 107, 129, 135, 154, 185], "histgradientboostingregressor": [5, 17, 28, 116, 117, 123, 147], "classifi": [5, 14, 23, 26, 41, 68, 74, 75, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 97, 98, 99, 106, 109, 110, 119, 125, 130, 131, 132, 136, 137, 141, 143, 146, 147, 148, 150, 151, 152, 154, 155, 157, 159, 161, 163, 171, 172, 179, 181, 185], "goto": 5, "strongli": [5, 108], "advis": [5, 108], "pointer": 5, "doc": 5, "rich": 5, "didact": [5, 35, 84, 85, 104], "improv": [5, 35, 65, 89, 90, 94, 95, 97, 98, 102, 111, 113, 117, 120, 122, 139, 145, 153, 179, 182], "compris": [5, 142], "guid": [5, 132, 141, 145, 185], "everi": [5, 73, 76, 101, 103, 105, 108, 138, 152], "explain": [5, 17, 28, 38, 57, 85, 108, 115, 116, 122, 126, 137, 145, 150, 170], "demonstr": [5, 85, 91, 109, 115, 116, 133, 139, 161], "good": [5, 21, 24, 46, 73, 76, 78, 79, 80, 83, 84, 85, 87, 89, 99, 100, 101, 102, 106, 108, 117, 118, 119, 126, 128, 129, 133, 134, 135, 137, 138, 141, 142, 150, 152, 153, 154, 156, 157, 179], "softwar": [5, 35, 80], "ask": [5, 124, 127, 128, 131, 134, 137, 142], "question": [5, 79, 87, 89, 104, 128, 129, 130, 134, 135, 136, 137, 142, 156], "stackoverflow": 5, "github": [5, 35, 79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "discuss": [5, 13, 17, 35, 46, 73, 76, 80, 109, 115, 117, 118, 131, 133, 137, 157], "driven": [5, 137], "inclus": 5, "contribut": [5, 59, 81, 94, 133, 156, 179], "other": [5, 13, 26, 28, 35, 43, 59, 72, 73, 76, 80, 81, 84, 85, 86, 88, 91, 92, 93, 97, 98, 99, 100, 103, 105, 108, 110, 117, 125, 129, 133, 135, 136, 138, 139, 141, 145, 150, 152, 153, 154, 160, 163, 164, 179, 182, 185], "advocaci": 5, "curat": 5, "our": [5, 17, 24, 28, 41, 46, 62, 63, 70, 76, 78, 79, 80, 81, 83, 85, 86, 88, 90, 91, 92, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 108, 109, 110, 113, 114, 115, 117, 122, 123, 124, 125, 127, 128, 131, 132, 133, 134, 137, 138, 139, 140, 141, 142, 143, 145, 146, 151, 153, 157, 158, 162, 164, 165, 185], "overflow": 5, "code": [5, 28, 35, 48, 68, 74, 77, 78, 86, 92, 93, 103, 110, 111, 112, 113, 114, 116, 124, 127, 128, 129, 130, 131, 133, 134, 139, 141, 143, 144, 148, 149, 150, 152, 153, 155, 156, 159, 160, 179], "start": [5, 35, 46, 73, 77, 78, 79, 81, 82, 83, 84, 85, 87, 89, 91, 94, 95, 98, 99, 100, 101, 105, 106, 115, 117, 118, 124, 125, 127, 129, 130, 131, 132, 133, 135, 136, 137, 140, 141, 142, 145, 147, 149, 151, 156, 157, 158], "carpentri": 5, "resourc": [5, 35, 70, 73, 105, 117, 152], "git": 5, "lab": [5, 35], "unsupervis": [5, 48], "structur": [5, 57, 70, 73, 81, 84, 85, 94, 103, 117, 145, 156, 157, 162, 171], "instanc": [5, 
46, 63, 73, 79, 80, 81, 84, 94, 101, 104, 105, 107, 108, 111, 114, 120, 123, 124, 125, 126, 127, 129, 132, 133, 135, 138, 140, 142, 144, 145, 147, 151, 154, 175, 181, 182], "sampl": [5, 13, 14, 15, 17, 19, 28, 41, 43, 48, 58, 60, 63, 73, 74, 75, 76, 79, 80, 81, 84, 85, 92, 93, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 113, 115, 116, 117, 119, 122, 124, 127, 128, 130, 132, 133, 134, 136, 137, 139, 140, 141, 142, 147, 149, 150, 152, 154, 156, 157, 158, 160, 161, 162, 164, 165, 174, 183], "supervis": [5, 48, 101, 172], "recov": [5, 17, 94, 133], "link": [5, 14, 78, 79, 83, 94, 104, 105, 106, 107, 108, 117, 124, 127, 142, 145], "drive": 5, "system": [5, 73, 94], "hand": [5, 28, 90, 94, 109, 122, 133, 135, 139, 150, 152], "nuanc": 5, "deep": [5, 89, 117, 118, 161], "better": [5, 16, 17, 27, 46, 72, 76, 80, 83, 87, 88, 89, 90, 91, 92, 93, 94, 96, 97, 98, 99, 101, 102, 110, 117, 118, 119, 124, 127, 128, 130, 133, 134, 136, 137, 138, 147, 151, 152, 154, 156, 161, 177, 185], "gradient": [5, 9, 12, 13, 15, 16, 17, 28, 81, 85, 109, 113, 114, 118, 122, 123, 147, 150, 154, 165], "boost": [5, 12, 13, 15, 16, 17, 28, 85, 113, 114, 118, 122, 123, 150, 154, 165], "classif": [5, 14, 21, 37, 38, 41, 48, 59, 63, 72, 73, 79, 80, 86, 88, 90, 92, 93, 94, 97, 98, 99, 101, 106, 109, 129, 130, 132, 135, 136, 140, 143, 144, 145, 146, 147, 161, 162, 163, 165, 170, 171, 172, 185], "regress": [5, 17, 21, 26, 37, 38, 39, 40, 41, 42, 43, 44, 48, 52, 57, 59, 65, 72, 79, 81, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 97, 98, 99, 101, 107, 110, 112, 115, 121, 124, 127, 129, 130, 131, 135, 136, 137, 141, 142, 144, 147, 149, 151, 156, 157, 160, 161, 164, 165, 170, 171, 172, 177, 185], "nativ": [5, 73, 79, 84, 85, 101, 125, 136, 139, 150, 152, 154], "task": [5, 41, 72, 73, 79, 90, 101, 103, 106, 132, 141, 172], "input": [5, 23, 39, 41, 57, 72, 73, 78, 80, 81, 83, 84, 87, 88, 89, 91, 94, 97, 98, 108, 110, 128, 134, 139, 140, 141, 152, 157, 158, 172, 185], "speech": 5, "text": [5, 35, 48, 105, 132, 140], "imag": [5, 94], "voic": 5, "pretrain": 5, "human": [5, 73, 105, 132], "cost": [5, 80, 101, 105, 118, 136, 137, 153, 154], "mainten": 5, "Not": [5, 85, 86, 88, 96, 136], "look": [5, 28, 62, 63, 65, 74, 75, 77, 79, 80, 82, 84, 94, 95, 97, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 117, 129, 131, 133, 135, 137, 139, 140, 142, 150, 157, 158, 165, 179, 185], "pytorch": 5, "tensorflow": 5, "introduct": [5, 55, 70, 165], "andrea": 5, "c": [5, 14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 94, 96, 130, 132, 136, 141, 151, 163, 172, 173, 174, 175, 177, 179, 181, 185], "m\u00fcller": 5, "sarah": 5, "guido": 5, "handbook": 5, "jake": 5, "van": 5, "der": 5, "pla": 5, "broader": [5, 160, 164], "statist": [5, 17, 55, 73, 75, 80, 81, 84, 87, 89, 101, 103, 107, 109, 110, 124, 127, 142, 185], "jame": 5, "witten": 5, "hasti": 5, "tibshirani": 5, "theori": [5, 109], "concept": [5, 12, 13, 21, 22, 32, 33, 38, 39, 55, 57, 71, 94, 99, 101, 147, 170, 171, 183], "explor": [5, 46, 74, 75, 79, 81, 84, 97, 103, 112, 114, 121, 123, 132, 133, 139, 149, 150, 152, 153, 154, 156, 165, 185], "kera": 5, "aur\u00e9lien": 5, "g\u00e9ron": 5, "kaggl": 5, "particip": 5, "challeng": [5, 35, 106, 139], "team": 5, "solut": [5, 9, 10, 11, 17, 18, 29, 30, 31, 35, 37, 40, 44, 58, 62, 64, 67, 84, 100, 124, 125, 138, 150, 165, 167, 176, 178, 180], "share": [5, 110], "winner": 5, "wai": [5, 70, 72, 73, 76, 79, 83, 84, 96, 97, 98, 100, 109, 110, 115, 116, 117, 124, 125, 127, 139, 142, 
145, 162, 177, 185], "now": [5, 17, 28, 46, 59, 72, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 88, 91, 92, 93, 94, 95, 97, 98, 99, 100, 101, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 143, 146, 148, 150, 152, 153, 154, 155, 157, 159, 160, 161, 163, 164, 177, 185], "touch": 5, "briefli": 5, "fit": [5, 13, 23, 25, 28, 38, 39, 41, 42, 43, 46, 50, 63, 65, 76, 77, 79, 82, 83, 84, 87, 89, 90, 92, 96, 97, 98, 100, 101, 102, 108, 109, 110, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 145, 147, 150, 151, 152, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 165, 170, 174, 179], "wider": [5, 35, 52], "mai": [5, 24, 42, 43, 46, 48, 73, 79, 81, 101, 107, 117, 129, 130, 133, 135, 136, 137, 138, 141, 142, 147, 154, 156, 161, 163], "fail": [5, 100, 132, 143, 146], "weak": [5, 15, 117, 137, 154], "analysi": [5, 70, 79, 92, 97, 106, 107, 133, 145, 154, 156, 157, 165, 178], "kei": [5, 7, 8, 45, 47, 49, 51, 53, 61, 65, 66, 92, 97, 99, 102, 117, 120, 123, 125, 126, 137, 142, 150, 151, 154, 161, 166], "achiev": [5, 17, 73, 78, 81, 83, 95, 97, 106, 132, 133, 161], "reliabl": [5, 94, 141], "even": [5, 35, 38, 60, 75, 76, 80, 84, 85, 87, 88, 89, 94, 98, 100, 101, 103, 109, 116, 117, 122, 125, 132, 133, 137, 139, 141, 142, 143, 144, 146, 147, 150, 156, 182, 183], "cross": [5, 12, 17, 21, 22, 23, 24, 25, 28, 32, 33, 38, 39, 43, 46, 55, 56, 58, 59, 64, 65, 72, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 97, 98, 99, 100, 102, 103, 107, 108, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 130, 133, 135, 136, 142, 143, 144, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 161, 165, 170, 175, 177, 179, 182, 183, 185], "accuraci": [5, 17, 26, 59, 65, 72, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 92, 94, 96, 97, 98, 99, 106, 109, 115, 125, 126, 127, 128, 130, 131, 134, 136, 137, 141, 143, 146, 147, 150, 151, 152, 153, 154, 155, 156, 157, 159, 163, 185], "imperfect": [5, 108], "estim": [5, 12, 16, 21, 28, 32, 38, 43, 46, 55, 59, 65, 70, 76, 80, 81, 84, 85, 90, 96, 98, 100, 107, 108, 109, 110, 111, 113, 114, 116, 117, 118, 119, 120, 122, 123, 124, 125, 127, 129, 130, 133, 135, 136, 139, 144, 145, 147, 150, 151, 152, 154, 170, 177, 179, 182, 185], "actual": [5, 73, 77, 80, 82, 91, 100, 101, 102, 117, 132, 142, 145, 152], "gener": [5, 16, 17, 21, 22, 27, 28, 46, 49, 55, 57, 58, 59, 65, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 106, 107, 109, 110, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 129, 130, 133, 135, 136, 137, 139, 141, 142, 144, 145, 147, 148, 149, 150, 151, 152, 154, 155, 156, 162, 165, 168, 177, 179, 185], "As": [5, 28, 46, 73, 76, 79, 80, 81, 84, 85, 87, 89, 92, 94, 96, 97, 98, 103, 104, 106, 107, 108, 110, 115, 116, 132, 133, 137, 141, 142, 144, 145, 147, 151, 152, 153, 156, 157, 161, 164], "narrow": 5, "spend": [5, 105, 117], "increasingli": 5, "effort": [5, 105], "split": [5, 14, 17, 22, 28, 36, 41, 43, 65, 73, 76, 78, 83, 84, 85, 91, 94, 96, 99, 100, 101, 104, 108, 111, 115, 116, 117, 119, 120, 122, 124, 127, 142, 145, 150, 152, 154, 157, 159, 161, 162, 163, 172, 173], "afford": 5, "trust": [5, 79, 80, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 137, 138, 139, 142, 150, 151, 152, 154, 157, 163, 164], "think": [5, 74, 75, 100, 105, 129, 135, 137], "carefulli": [5, 
94], "complet": [5, 14, 28, 35, 46, 96, 98, 124, 127, 132, 133, 149, 152, 156, 179], "futur": [5, 73, 76, 85, 90, 100, 101, 105, 136, 142, 150, 159, 163], "upon": [5, 90, 97, 98], "affect": [5, 100, 133, 137, 141, 150, 157, 159, 162, 163], "live": [5, 133], "sure": [5, 17, 59, 79, 81, 84, 89, 114, 123], "divers": [5, 157], "demograph": [5, 103, 118], "increas": [5, 15, 17, 27, 28, 42, 43, 46, 50, 52, 57, 75, 81, 84, 97, 101, 102, 108, 113, 116, 117, 122, 130, 133, 136, 137, 140, 141, 150, 154, 156, 157, 159, 161, 162, 163, 175, 183], "coverag": 5, "phrase": 5, "recommend": [5, 35, 70, 73, 84], "identifi": [5, 12, 70, 85, 94, 97, 102, 142, 179], "ani": [5, 14, 17, 28, 35, 42, 72, 73, 76, 79, 81, 83, 85, 88, 89, 91, 94, 97, 99, 100, 101, 102, 103, 105, 106, 107, 108, 110, 117, 118, 119, 123, 124, 125, 126, 127, 132, 133, 138, 141, 145, 150, 152, 153, 156, 161, 174, 185], "bia": [5, 36, 50, 55, 57, 108, 129, 130, 132, 135, 136, 145, 165], "acquisit": 5, "full": [5, 7, 8, 17, 35, 45, 47, 48, 49, 51, 53, 61, 65, 66, 76, 77, 81, 82, 84, 96, 101, 117, 122, 124, 126, 127, 140, 148, 152, 155, 166, 179, 182], "chain": [5, 76, 81, 85], "acquir": [5, 12, 21, 32, 38, 55, 102, 105, 170, 182], "fanci": 5, "put": [5, 32, 36, 81, 102, 108, 133, 157], "product": [5, 96, 99, 101, 129, 133, 135, 150], "routin": [5, 17, 94, 185], "debt": 5, "simpler": [5, 13, 46, 85, 137], "easier": [5, 81, 84, 91, 139, 145], "maintain": 5, "less": [5, 28, 43, 85, 94, 100, 103, 107, 108, 109, 110, 117, 118, 126, 130, 132, 133, 135, 136, 137, 139, 150, 156], "power": [5, 28, 35, 105, 117, 118, 129, 133, 135, 139, 157], "drift": 5, "gave": [5, 142], "methodolog": [5, 35, 55, 101, 162], "element": [5, 21, 80, 84, 94, 101, 133, 134, 140, 142], "alwai": [5, 14, 17, 21, 23, 41, 43, 52, 78, 79, 80, 83, 85, 89, 91, 94, 96, 97, 98, 99, 101, 102, 106, 107, 119, 125, 142, 151, 152, 153, 164, 177, 179, 185], "solid": 5, "conclus": [5, 73, 93, 94, 96, 98, 99, 100, 102, 103, 106, 117, 125, 136], "standpoint": 5, "biggest": 5, "shortcom": 5, "cannot": [5, 17, 28, 57, 73, 99, 100, 102, 108, 117, 124, 127, 132, 139, 142, 145, 147, 150, 153, 164, 182], "autom": [5, 73, 165, 183], "domain": 5, "knowledg": [5, 35, 70, 96, 102, 110, 127, 139, 152], "critic": [5, 35, 104], "thing": [5, 73, 84, 85, 90, 100, 139, 150], "oper": [5, 81, 110, 132, 142, 152], "risk": [5, 129, 135], "advertis": 5, "individu": [5, 16, 28, 73, 81, 85, 103, 106, 110, 112, 119, 121, 133, 141, 157, 185], "caus": [5, 17, 50, 57, 73, 84, 88, 89, 117, 133, 150, 152, 179], "wast": [5, 117], "bit": [5, 28, 83, 101, 102, 108, 133, 139, 143, 146, 152], "monei": 5, "annoi": 5, "otherwis": [5, 17, 79, 88, 94, 96, 119, 132, 139, 140, 161], "mostli": [5, 136, 137, 156], "harmless": 5, "medicin": 5, "kill": 5, "logic": [5, 136, 150, 163], "fals": [5, 15, 52, 80, 84, 87, 89, 94, 96, 100, 105, 107, 123, 125, 126, 132, 133, 135, 136, 139, 142, 146, 149, 150, 154, 156, 157, 163, 175, 177], "brain": 5, "tumor": 5, "sent": 5, "surgeri": 5, "veri": [5, 17, 55, 59, 73, 79, 85, 87, 89, 96, 97, 99, 102, 106, 107, 108, 110, 115, 117, 119, 123, 132, 133, 137, 139, 142, 150, 151, 152, 153, 154, 161, 179], "danger": [5, 133, 154], "mr": 5, "confirm": [5, 91, 99, 105, 107, 110, 124, 127, 130, 132, 136, 137, 141, 145, 152, 153], "should": [5, 17, 22, 28, 33, 34, 43, 46, 68, 72, 73, 80, 81, 85, 89, 94, 96, 98, 99, 100, 101, 102, 104, 105, 106, 108, 109, 114, 115, 117, 118, 123, 124, 125, 127, 128, 133, 134, 137, 139, 142, 143, 146, 147, 148, 150, 151, 152, 155, 157, 161, 162, 175, 177, 179, 182, 183], 
"person": [5, 63, 73, 79, 103, 106, 142, 150], "delai": 5, "life": [5, 73, 124, 127, 145], "save": [5, 185], "treatment": [5, 57], "hospit": [5, 24], "stai": [5, 39, 43, 110, 133], "overcrowd": 5, "unit": [5, 73, 79, 81, 84, 85, 101, 103, 105, 107, 108, 109, 133, 136, 138, 140, 145, 150, 154], "chang": [5, 27, 28, 36, 46, 81, 93, 98, 108, 109, 113, 122, 125, 133, 145, 148, 151, 155, 157, 182, 185], "inpati": 5, "chose": [5, 52, 73, 115, 133], "load": [5, 17, 63, 74, 75, 76, 77, 81, 82, 84, 85, 86, 88, 92, 95, 96, 97, 99, 100, 101, 102, 103, 106, 107, 109, 111, 116, 117, 118, 120, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 142, 150, 151, 153, 154, 157, 158, 159, 160, 161, 162, 163, 164, 179, 185], "interest": [5, 48, 73, 74, 75, 91, 93, 98, 99, 101, 102, 103, 105, 107, 110, 115, 129, 132, 135, 136, 138, 139, 141, 142, 145, 147, 150, 151, 152, 153, 154, 156, 163, 185], "focus": [5, 13, 60, 107, 109, 133, 142, 152], "easi": [5, 72, 85, 100, 101, 105, 132, 161], "accumul": 5, "target": [5, 17, 28, 38, 39, 42, 46, 48, 57, 59, 63, 65, 72, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 158, 159, 163, 177, 185], "proxi": [5, 142], "reflect": [5, 101, 110, 119], "ground": [5, 115, 142], "truth": [5, 115, 142], "polici": [5, 24], "uneven": 5, "across": [5, 41, 46, 76, 81, 91, 108, 133, 141, 154, 185], "popul": [5, 73, 101, 103, 107, 108, 142, 156], "eg": 5, "qualiti": [5, 108, 141, 145, 152], "affair": 5, "desir": [5, 78, 83, 119, 133], "qualif": 5, "respons": 5, "women": 5, "pai": [5, 84, 109], "men": 5, "pick": [5, 28, 90, 96, 104, 105, 108, 116, 151, 182], "amplifi": 5, "inequ": 5, "mechan": [5, 28, 80, 81, 152], "die": 5, "naiv": [5, 17, 28, 73, 96, 106, 116, 119], "bad": [5, 88, 97, 127, 133, 142, 179], "health": [5, 35], "fallaci": 5, "compar": [5, 12, 16, 17, 21, 28, 36, 46, 48, 58, 59, 72, 73, 76, 78, 80, 81, 83, 84, 85, 86, 88, 96, 97, 98, 102, 107, 108, 110, 113, 115, 116, 120, 122, 125, 126, 127, 129, 130, 133, 135, 136, 142, 145, 154, 156, 157, 165, 177, 185], "wors": [5, 17, 27, 46, 72, 97, 98, 100, 138, 177], "baselin": [5, 21, 23, 78, 83, 85, 86, 88, 93, 94, 97, 98, 118, 165], "heart": [5, 28, 105, 129, 135], "pressur": 5, "greater": [5, 28, 72, 132], "trigger": 5, "care": [5, 33, 46, 84, 96, 99, 106, 107, 124, 127, 133], "which": [5, 13, 15, 16, 17, 22, 24, 26, 28, 32, 33, 39, 41, 43, 46, 57, 65, 68, 71, 72, 73, 76, 79, 80, 81, 83, 84, 85, 89, 90, 91, 92, 95, 96, 97, 98, 100, 101, 102, 103, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 153, 154, 156, 157, 161, 162, 163, 170, 171, 172, 177, 179, 181, 183, 185], "learner": [5, 13, 109, 110, 115, 117, 118], "predictor": [5, 13, 14, 15, 16, 65, 80, 81, 85, 98, 102, 120, 130, 133, 136, 150, 157], "pure": [5, 127, 163], "benefici": [5, 32, 81, 117, 125, 152, 154, 161], "intervent": [5, 73], "brittl": 5, "interpret": [5, 16, 35, 103, 105, 107, 108, 131, 135, 136, 137, 138, 139, 145, 153, 157, 163], "subject": [5, 73, 142], "caution": [5, 89, 108], "feedback": 5, "loop": [5, 96, 108, 148, 150, 152, 155, 185], "todai": 5, "ai": 5, "alloc": 5, "loan": 5, "screen": [5, 7, 8, 45, 47, 
[... remainder of the regenerated Sphinx search index diff (searchindex.js) omitted; machine-generated build artifact, not meaningful for review ...]
91, "linspac": [91, 95, 97, 98, 110, 115, 121, 128, 134, 138, 140, 153, 163], "edgecolor": [91, 94, 98, 101, 102, 104, 105, 106, 107, 132, 153, 163], "legend": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 141, 142, 153, 157, 161, 162, 163, 164], "bbox_to_anchor": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 142, 153, 157, 161, 163], "loc": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 141, 142, 153, 157, 161, 163], "upper": [91, 94, 98, 99, 100, 105, 107, 108, 110, 115, 121, 131, 136, 137, 142, 157, 161, 163], "xlabel": [91, 94, 96, 97, 98, 99, 101, 102, 104, 105, 106, 108, 122, 123, 125, 126, 132, 133, 135, 136, 141, 142], "Such": [91, 133], "extrem": [91, 96, 106, 107, 124, 127, 132, 133], "gamma": [92, 96, 97, 131, 132, 137], "svm": [92, 96, 97, 139], "form": [92, 94, 96, 97, 108, 128, 129, 134, 135, 138, 140, 145, 151], "accomplish": [92, 97], "rbf": [92, 97, 131, 132, 137], "svc": [92, 96, 97], "scheme": [92, 97, 100, 109, 162], "validationcurvedisplai": [92, 97, 102, 122, 129, 135], "10e": [92, 97], "10e2": [92, 97], "logarithm": [92, 97], "svc__gamma": [92, 97], "retriev": [92, 97, 101, 108, 133], "learningcurvedisplai": [92, 95, 97], "half": [93, 98, 142], "uniform": [93, 98, 107, 110, 132, 154], "handwritten": 94, "digit": 94, "load_digit": 94, "recreat": 94, "minmaxscal": [94, 110, 133, 185], "kfold": [94, 96, 99, 123, 144, 147, 152], "test_score_no_shuffl": 94, "931": 94, "027": 94, "test_score_with_shuffl": 94, "964": 94, "006": [94, 119, 123], "all_scor": [94, 96], "xlim": [94, 108, 115, 142], "impos": [94, 117, 154], "94166667": 94, "89722222": 94, "94707521": 94, "96657382": 94, "90250696": 94, "ship": 94, "descr": [94, 101, 107], "_digits_dataset": 94, "optic": 94, "recognit": 94, "characterist": [94, 101, 107, 142], "1797": 94, "8x8": 94, "pixel": 94, "creator": 94, "alpaydin": 94, "boun": 94, "edu": 94, "tr": 94, "juli": 94, "1998": 94, "copi": [94, 101, 105, 108, 128, 130, 134, 136, 179], "uci": 94, "ic": 94, "nist": 94, "bitmap": 94, "preprint": 94, "32x32": 94, "nonoverlap": 94, "block": [94, 100, 101, 107, 108, 116], "4x4": 94, "invari": [94, 132], "distort": 94, "garri": 94, "j": 94, "candela": 94, "dimmick": 94, "geist": 94, "grother": 94, "janet": 94, "wilson": 94, "handprint": 94, "nistir": 94, "5469": 94, "kaynak": 94, "1995": 94, "Their": [94, 141], "msc": 94, "thesi": 94, "institut": 94, "graduat": 94, "bogazici": 94, "univers": 94, "cascad": 94, "kybernetika": 94, "ken": 94, "tang": 94, "ponnuthurai": 94, "n": [94, 96, 97, 107, 110, 116, 120, 123, 128, 131, 133, 134, 137, 141, 142, 151, 152, 155, 157], "suganthan": 94, "xi": 94, "yao": 94, "kai": 94, "qin": 94, "dimensionalityreduct": 94, "lda": 94, "electr": [94, 104], "electron": 94, "nanyang": 94, "2005": 94, "claudio": 94, "gentil": 94, "nip": 94, "2000": 94, "writer": 94, "wrote": 94, "certain": [94, 103, 123], "130": [94, 116], "hypothesi": [94, 100, 133], "itertool": [94, 104], "bound": [94, 142, 145], "writer_boundari": 94, "256": [94, 116, 117, 154], "386": 94, "516": 94, "646": 94, "776": 94, "915": 94, "1029": 94, "1157": 94, "1287": 94, "1415": 94, "1545": 94, "1667": 94, "zeros_lik": [94, 109], "lower_bound": 94, "upper_bound": 94, "group_id": 94, "lb": 94, "zip": [94, 105, 115, 128, 132, 134], "ytick": [94, 99], "xtick": 94, "ylabel": [94, 99, 100, 102, 122, 123, 132, 133, 135, 136, 141, 142, 157], "groupkfold": 94, "921": 94, "021": 94, "realiti": 94, "synthet": [95, 110, 115, 125, 130, 132, 136, 142, 160, 161, 162, 
164], "train_siz": [95, 97, 148, 155], "endpoint": 95, "325": [95, 105], "775": 95, "displai": [95, 103, 112, 121, 141, 142, 153, 179], "from_estim": [95, 97, 102, 109, 122, 131, 132, 135, 137, 141, 142, 157, 161, 163], "score_typ": [95, 97], "negate_scor": [95, 102, 122, 135], "neg_": [95, 101, 129, 135, 147], "score_nam": [95, 97], "std_display_styl": [95, 97, 102, 122, 135], "errorbar": [95, 97, 102, 122, 133, 135], "ax_": [95, 97, 102, 122, 131, 135, 137, 142], "xscale": [95, 133], "log": [95, 133, 145, 153, 154], "alon": [95, 132], "anymor": [95, 98, 100, 101, 117], "bay": 95, "especi": [95, 133, 136], "report": [95, 96, 101], "problemat": [96, 133, 154], "underestim": 96, "philosoph": 96, "breast": 96, "cancer": 96, "load_breast_canc": 96, "param_grid": [96, 118, 120, 150, 152, 161, 179, 185], "model_to_tun": 96, "gridsearchcvifittedgridsearchcv": [96, 150, 152], "svcsvc": 96, "best_params_": [96, 123, 149, 150, 152, 154, 156, 161, 179, 185], "best_score_": 96, "627": 96, "stage": [96, 99, 116, 124, 125, 127, 139, 142, 163], "misinterpret": 96, "forget": 96, "pitfal": [96, 136], "emb": [96, 152], "dedic": [96, 145], "declar": 96, "inner_cv": 96, "outer_cv": 96, "014": 96, "trial": 96, "test_score_not_nest": 96, "test_score_nest": 96, "n_trial": 96, "non_nest": 96, "append": [96, 99, 108, 110, 121, 123, 137, 147], "merg": [96, 125], "whisker": [96, 107, 123, 125, 126, 133, 136, 146], "vert": [96, 107, 123, 125, 126, 133, 136, 146], "highest": [96, 109, 124, 125, 127, 142, 145, 153, 154], "lure": 96, "overli": [96, 101], "012239": 97, "002296": 97, "680000": 97, "011681": 97, "002262": 97, "746667": 97, "011859": 97, "002250": 97, "786667": 97, "011066": 97, "002401": 97, "800000": 97, "011485": 97, "002170": 97, "011416": 97, "002154": 97, "011133": 97, "002183": 97, "009980": 97, "002140": 97, "826667": 97, "010160": 97, "002126": 97, "010295": 97, "002155": 97, "733333": 97, "765": 97, "043": 97, "param_nam": [97, 102, 122, 135, 150, 153, 154, 179, 185], "disp": [97, 102, 122, 131, 135, 137, 142], "errorbar_kw": 97, "transpar": 97, "regim": 97, "oscil": 97, "donat": [97, 106, 142, 143, 146], "simplist": 97, "imposs": 97, "cv_results_logistic_regress": 98, "test_score_logistic_regress": 98, "815937": 98, "813849": 98, "815036": 98, "815569": 98, "810982": 98, "814831": 98, "813112": 98, "810368": 98, "812375": 98, "816306": 98, "most_frequent_classifi": 98, "cv_results_most_frequ": 98, "test_score_most_frequ": 98, "760329": 98, "756808": 98, "759142": 98, "760739": 98, "761681": 98, "761885": 98, "757463": 98, "757176": 98, "763114": 98, "all_test_scor": 98, "stratified_dummi": 98, "cv_results_stratifi": 98, "test_score_dummy_stratifi": 98, "uniform_dummi": 98, "cv_results_uniform": 98, "test_score_dummy_uniform": 98, "wrong": [98, 103, 124, 127, 153], "henc": [98, 108, 115, 133, 154], "uniformli": [98, 110, 137], "weakest": 98, "argu": 98, "permutation_test_scor": 98, "permut": [98, 179], "quit": [98, 99, 100, 102, 103, 105, 116, 141], "strongest": 98, "load_iri": [99, 179], "toi": [99, 139], "nine": 99, "data_random": 99, "randn": [99, 110, 115, 124, 127, 139], "train_index": 99, "test_index": 99, "six": 99, "train_cv_count": 99, "test_cv_count": 99, "fold_idx": 99, "train_idx": 99, "test_idx": 99, "enumer": [99, 109, 110, 121, 123, 125, 128, 134, 152], "idx": [99, 125, 163], "953": 99, "009": [99, 116], "frequenc": [99, 106, 142], "stratifiedkfold": [99, 143, 146], "967": 99, "past": [100, 106, 128, 130, 134, 136, 142], "ident": [100, 101, 116, 142, 152], "financi": 100, "quotat": 
100, "tot": 100, "xom": 100, "exxon": 100, "cvx": 100, "chevron": 100, "cop": 100, "conocophillip": 100, "vlo": 100, "valero": 100, "template_nam": 100, "quot": 100, "stock": 100, "2f": [100, 101, 117, 120, 122, 128, 131, 133, 134, 137, 138, 139, 140, 142, 150, 154, 157, 163], "surprisingli": [100, 101, 107, 127], "outstand": 100, "eas": [100, 101, 103, 132, 139], "r2_score": 100, "verifi": [100, 113, 122, 139], "doesn": 100, "proper": [100, 107, 139, 152], "to_period": 100, "q": 100, "94": [100, 109], "forecast": 100, "ulterior": 100, "timeseriessplit": 100, "nuniqu": [100, 105, 161, 185], "118": 100, "511": 100, "shelv": 100, "absurd": 100, "intend": [101, 106, 177], "dive": 101, "area": [101, 102, 104, 110, 137, 142], "geograph": [101, 107, 118], "_california_housing_dataset": [101, 107], "20640": [101, 107], "medinc": [101, 107, 108], "houseag": [101, 107, 108], "averoom": [101, 107, 108, 156], "household": [101, 107], "avebedrm": [101, 107, 108], "aveoccup": [101, 107, 108], "member": [101, 107], "latitud": [101, 107, 108], "longitud": [101, 107, 108], "statlib": [101, 107], "dcc": [101, 107], "fc": [101, 107], "pt": [101, 107], "ltorgo": [101, 107], "cal_hous": [101, 107], "district": [101, 107, 108, 118], "hundr": [101, 107, 125], "deriv": [101, 105, 107, 129, 135, 139], "1990": [101, 107], "u": [101, 107], "smallest": [101, 107, 116, 133], "bureau": [101, 107], "600": [101, 107], "resid": [101, 107], "home": [101, 107], "empti": [101, 107], "vacat": [101, 107], "resort": [101, 107], "pace": [101, 107], "kellei": [101, 107], "ronald": [101, 107], "barri": [101, 107], "spatial": [101, 103, 107], "autoregress": [101, 107], "1997": [101, 107], "291": [101, 107], "297": [101, 107], "3252": [101, 107, 108], "984127": [101, 107, 108], "023810": [101, 107, 108], "322": [101, 107, 108, 116], "555556": [101, 107, 108], "3014": [101, 107, 108], "238137": [101, 107, 108], "971880": [101, 107, 108], "2401": [101, 107, 108], "109842": [101, 107, 108], "2574": [101, 107, 108], "288136": [101, 107, 108], "073446": [101, 107, 108], "496": [101, 107, 108, 153, 154], "802260": [101, 107, 108], "6431": [101, 107, 108], "817352": [101, 107, 108], "073059": [101, 107, 108], "558": [101, 107, 108], "547945": [101, 107, 108], "8462": [101, 107, 108], "281853": [101, 107, 108], "081081": [101, 107, 108], "565": [101, 107, 108], "181467": [101, 107, 108], "452": 101, "358": 101, "352": 101, "341": 101, "342": 101, "medhousev": [101, 107, 108], "decisiontreeregressorifitteddecisiontreeregressor": [101, 139, 164], "mean_absolute_error": [101, 113, 120, 121, 122, 138, 145], "grown": [101, 113, 117, 122], "leaf": [101, 117, 150, 154, 155, 157, 161, 163, 173, 174], "node": [101, 117, 119, 150, 155, 157, 161, 163, 172, 173, 174], "phenomena": 101, "unstabl": [101, 133], "wouldn": 101, "unlimit": [101, 117], "lucki": 101, "easiest": 101, "variant": 101, "139716": 101, "002720": 101, "909797": 101, "140345": 101, "002705": 101, "421170": 101, "138842": 101, "002797": 101, "411089": 101, "140532": 101, "002750": 101, "319824": 101, "137840": 101, "002907": 101, "607875": 101, "front": 101, "revert": [101, 129, 135], "negat": 101, "test_error": [101, 133], "139676": 101, "003097": 101, "901300": 101, "140465": 101, "003156": 101, "572767": 101, "140544": 101, "003002": 101, "194585": 101, "141134": 101, "002926": 101, "590236": 101, "141419": 101, "002745": 101, "727998": 101, "percentag": [101, 110, 145], "tag": [101, 128, 134], "expert": [101, 139], "16606712": 101, "16141486": 101, "16183972": 101, "16127944": 
101, "15610027": 101, "00233054": 101, "00216913": 101, "00233126": 101, "00212193": 101, "00216627": 101, "26291527": 101, "41947109": 101, "44492564": 101, "23357874": 101, "40788361": 101, "extens": 101, "overal": [101, 113, 117, 118, 122, 133, 137, 142, 161], "fluctuat": [102, 139, 150], "hopefulli": [102, 117, 138], "260": 102, "305": 102, "harm": 102, "matter": [102, 126, 152], "compromis": [102, 142], "dispers": [102, 116], "usa": 103, "usd": 103, "outdat": 103, "fnlwgt": 103, "respond": 103, "columns_to_plot": 103, "pairwis": 103, "space": [103, 110, 112, 114, 115, 121, 123, 127, 129, 132, 133, 135, 136, 137, 141, 157, 159, 162, 163], "plotli": [103, 153, 156, 179], "parcoord": 103, "graph_object": 103, "labelencod": 103, "le": 103, "def": [103, 108, 110, 115, 128, 131, 132, 134, 137, 139, 140, 150, 153, 154, 161, 179], "generate_dict": 103, "col": [103, 116], "tickval": 103, "classes_": [103, 141, 142, 157, 163, 181], "ticktext": 103, "els": [103, 110, 139, 161], "plot_list": 103, "fig": [103, 104, 108, 132, 133, 142, 145, 153, 156, 157, 158, 179], "colorscal": 103, "viridi": [103, 105, 107, 153, 163, 179], "coordin": [103, 141, 152, 153, 156, 179], "hold": [103, 145, 153, 156, 179, 185], "undo": [103, 153], "4000": [103, 106], "spearman": 103, "rank": [103, 117, 123, 185], "pearson": 103, "versatil": 103, "meant": 103, "nomin": 103, "hierarchi": 103, "squareform": 103, "stat": [103, 117, 120, 154], "spearmanr": 103, "ax1": 103, "ax2": 103, "subplot": [103, 104, 108, 132, 133, 136, 142, 145, 157, 161, 162, 163], "corr": 103, "symmetr": [103, 117, 133, 145, 161], "fill_diagon": 103, "hierarch": 103, "ward": 103, "linkag": 103, "distance_matrix": 103, "ab": [103, 134, 136, 139], "dist_linkag": 103, "dendro": 103, "dendrogram": 103, "to_list": 103, "leaf_rot": 103, "dendro_idx": 103, "arang": [103, 105, 107, 108, 110, 136, 161, 162, 164], "ivl": 103, "imshow": [103, 163], "cmap": [103, 109, 131, 132, 137, 141, 150, 157, 161, 163], "coolwarm": 103, "set_xtick": 103, "set_ytick": [103, 108], "set_xticklabel": 103, "vertic": [103, 141], "set_yticklabel": [103, 108], "tight_layout": 103, "diverg": [103, 131, 137, 141, 156, 163], "colormap": [103, 131, 132, 137, 141, 163], "softer": [103, 141], "anti": 103, "dark": [103, 137], "directori": [104, 105, 106], "charact": 104, "marker": [104, 109, 142, 163], "pars": [104, 105], "lotconfig": 104, "208500": 104, "fr2": 104, "181500": 104, "223500": 104, "corner": [104, 142], "140000": 104, "250000": 104, "nin": 104, "tail": [104, 105, 107, 141], "coupl": [104, 105, 107, 116, 117, 133, 154], "core": [104, 105, 106, 107, 115, 116], "rangeindex": [104, 105, 106, 107], "null": [104, 105, 106, 107, 141], "1201": 104, "landslop": 104, "condition1": 104, "condition2": 104, "bldgtype": 104, "yearremodadd": 104, "roofstyl": 104, "roofmatl": 104, "exterior1st": 104, "exterior2nd": 104, "masvnrtyp": 104, "588": 104, "1452": 104, "exterqu": 104, "extercond": 104, "foundat": 104, "bsmtqual": 104, "1423": 104, "bsmtcond": 104, "bsmtexposur": 104, "1422": 104, "bsmtfintype1": 104, "bsmtfintype2": 104, "heat": 104, "heatingqc": 104, "centralair": 104, "bsmtfullbath": 104, "bsmthalfbath": 104, "kitchenqu": 104, "fireplacequ": 104, "770": 104, "garagetyp": 104, "1379": 104, "garageyrblt": 104, "garagefinish": 104, "garagequ": 104, "garagecond": 104, "paveddr": 104, "69": [104, 109, 150, 154], "281": 104, "901": 104, "kb": [104, 106], "numerical_data": 104, "410": 104, "layout": 104, "subplots_adjust": [104, 105, 107, 108], "hspace": [104, 105, 107], "wspace": 
[104, 107], "criterion": [104, 108, 157], "swim": 104, "pool": [104, 127], "string_data": 104, "490": [104, 127], "ceil": 104, "zip_longest": 104, "n_string_featur": 104, "nrow": [104, 142, 163], "ncol": [104, 132, 134, 142, 145, 163], "ravel": [104, 105, 134, 141, 163], "barh": [104, 106, 108, 137, 141, 142], "set_titl": [104, 139, 140, 142, 163], "databas": [104, 156], "grvl": 104, "gd": 104, "make_column_transform": [104, 119, 136], "most_frequent_imput": 104, "mean_imput": 104, "ames_housing_preprocess": 104, "tolist": [104, 130, 136, 157, 163], "timestamp": 105, "150": [105, 109], "0880": 105, "033870": 105, "161": [105, 116, 157], "336": 105, "0842": 105, "033571": 105, "163": 105, "409": 105, "0234": 105, "033223": 105, "156": 105, "445": 105, "0016": 105, "032908": 105, "148": 105, "441": 105, "1144": 105, "38254": 105, "38253": 105, "mb": [105, 107], "str": 105, "datetim": 105, "direct": [105, 132, 137, 154], "reopen": 105, "09": [105, 133], "explan": [105, 158], "soup": 105, "blender": 105, "blend": [105, 132], "veget": 105, "instantan": 105, "profession": 105, "calibr": 105, "track": 105, "spent": [105, 125], "food": 105, "uranium": 105, "petrol": 105, "ga": 105, "coal": 105, "plant": 105, "400": 105, "cheaper": [105, 108], "w": [105, 145, 163], "deliv": 105, "breakout": 105, "kilomet": 105, "costli": [105, 137, 149, 150, 156], "cruis": 105, "datetime64": 105, "ns": 105, "freq": 105, "august": 105, "septemb": 105, "date_first_rid": 105, "cycling_rid": 105, "data_rid": 105, "target_rid": 105, "tempor": 105, "resolut": [105, 154], "smoother": [105, 110], "tmp": [105, 142], "ipykernel_3158": 105, "3967367282": 105, "py": [105, 116, 133, 142, 143, 146], "futurewarn": [105, 142], "deprec": [105, 142], "set_xlabel": [105, 145, 163], "extremum": 105, "rng": [105, 107, 108, 110, 115, 127, 139], "randomst": [105, 107, 108, 110, 115, 127, 132, 139], "quantiz": [105, 107], "midpoint": [105, 107], "interv": [105, 107, 110, 112, 115, 121, 160, 162, 164], "qcut": [105, 107], "retbin": [105, 107], "lambda": [105, 107, 153, 179], "mid": [105, 107], "palett": [105, 107, 109, 131, 137, 141, 157, 161, 163], "uphil": 105, "physiolog": 105, "stimuli": 105, "recenc": [106, 142], "monetari": [106, 142], "12500": 106, "98": [106, 108, 157], "3250": [106, 140], "6000": 106, "cm\u00b3": [106, 142], "748": 106, "747": 106, "noth": [106, 110], "shock": 106, "her": 106, "762032": 106, "237968": 106, "strike": 106, "fetch": 107, "internet": 107, "california_h": 107, "526": 107, "585": 107, "521": [107, 142], "413": [107, 154], "422": [107, 142], "demographi": 107, "granular": [107, 142], "20639": 107, "640": [107, 157], "unnotic": 107, "features_of_interest": [107, 133], "429000": 107, "096675": 107, "070655": 107, "1425": 107, "476744": 107, "474173": 107, "473911": 107, "386050": 107, "1132": 107, "462122": 107, "846154": 107, "333333": 107, "692308": 107, "440716": 107, "006079": 107, "429741": 107, "787": [107, 151], "229129": 107, "048780": 107, "818116": 107, "1166": 107, "052381": 107, "099526": 107, "282261": 107, "1725": 107, "141": 107, "909091": 107, "066667": 107, "1243": 107, "35682": 107, "huge": 107, "datapoint": [107, 137], "coast": 107, "big": [107, 145], "citi": [107, 145], "san": 107, "diego": 107, "lo": 107, "angel": 107, "jose": 107, "francisco": 107, "columns_drop": 107, "distinguish": 107, "curiou": [107, 185], "553": [107, 142], "062": 107, "coef": [107, 108, 133, 136, 137, 141], "est": [107, 133], "spot": [107, 133], "10000": 108, "100k": 108, "assert": [108, 117, 185], "un": [108, 
133], "bin_var": 108, "randint": [108, 120, 124, 127], "rnd_bin": 108, "num_var": 108, "rnd_num": 108, "x_with_rnd_feat": 108, "x_train": 108, "x_test": 108, "y_train": [108, 174], "y_test": 108, "train_dataset": 108, "insert": [108, 131, 137], "kde": 108, "scatter_kw": 108, "x_i": 108, "versu": [108, 145, 165], "6013465992564662": 108, "5975757977248636": 108, "Its": 108, "somehow": 108, "rest": [108, 139, 163], "worth": 108, "habit": 108, "nb": 108, "outcom": [108, 141, 142, 156], "shall": [108, 110], "rise": 108, "80k": 108, "gaug": 108, "decad": 108, "visibl": [108, 145], "dev": 108, "601315755610292": 108, "5972410717953758": 108, "safe": 108, "perturb": 108, "repeatedkfold": 108, "cv_model": 108, "n_repeat": [108, 125, 126], "boxplot": [108, 130, 136], "cyan": 108, "satur": 108, "pretti": 108, "l1": 108, "015": 108, "5899811014945939": 108, "5769786920519312": 108, "partli": 108, "multivari": 108, "instabl": 108, "teas": 108, "9802456390477668": 108, "8472757276858796": 108, "formal": 108, "brought": 108, "argsort": [108, 125], "9797817485556926": 108, "8468741418387562": 108, "get_score_after_permut": 108, "curr_feat": 108, "x_permut": 108, "col_idx": 108, "permuted_scor": 108, "get_feature_import": 108, "baseline_score_train": 108, "permuted_score_train": 108, "feature_import": 108, "684": 108, "list_feature_import": 108, "n_round": 108, "672": 108, "0104": 108, "heavili": 108, "permutation_import": 108, "calcul": [108, 109, 142], "importances_mean": 108, "importances_std": 108, "plot_feature_import": 108, "perm_importance_result": 108, "feat_nam": 108, "xerr": 108, "perm_importance_result_train": 108, "realist": [108, 130, 136, 141], "unclear": 108, "culmen_column": [109, 131, 137, 141, 157, 158, 159, 163], "purposefulli": 109, "unlik": [109, 113, 122, 157], "misclassifi": [109, 132, 137], "decisiontreeclassifi": [109, 119, 143, 146, 157, 161, 163], "tab": [109, 110, 115, 131, 132, 136, 137, 141, 142, 157, 161, 162, 163], "decisiontreeclassifierifitteddecisiontreeclassifi": [109, 157, 163], "misclassified_samples_idx": 109, "flatnonzero": 109, "data_misclassifi": 109, "decisionboundarydisplai": [109, 131, 132, 137, 141, 157, 159, 161, 163], "response_method": [109, 131, 132, 137, 141, 157, 159, 161, 163], "rdbu": [109, 132, 161], "center": [109, 132, 134, 153, 156, 161], "nwith": [109, 115], "misclassif": [109, 137, 142], "sample_weight": 109, "trick": 109, "drastic": 109, "qualit": [109, 110, 128, 134, 163], "newly_misclassified_samples_idx": 109, "remaining_misclassified_samples_idx": 109, "intersect1d": 109, "ensemble_weight": 109, "935672514619883": 109, "6929824561403509": 109, "adaboostclassifi": 109, "samm": 109, "adaboostclassifierifittedadaboostclassifi": 109, "decisiontreeclassifierdecisiontreeclassifi": 109, "boosting_round": 109, "estimators_": [109, 110, 112, 121], "to_numpi": [109, 110, 121, 142], "640x480": 109, "estimator_weights_": 109, "58351894": 109, "46901998": 109, "03303773": 109, "estimator_errors_": 109, "05263158": 109, "05864198": 109, "08787269": 109, "sens": [109, 164], "generate_data": [110, 115], "x_min": [110, 115], "x_max": [110, 115], "capabl": [110, 115, 133, 142, 160, 162, 164], "y_pred": [110, 143, 145, 146], "data_bootstrap": 110, "target_bootstrap": 110, "bootstrap_sampl": 110, "bootstrap_indic": 110, "n_bootstrap": 110, "bootstrap_idx": 110, "facecolor": 110, "180": [110, 140], "linewidth": [110, 131, 132, 137, 161], "darker": [110, 131, 137, 141], "data_train_hug": 110, "data_test_hug": 110, "target_train_hug": 110, "100_000": 110, 
"data_bootstrap_sampl": 110, "target_bootstrap_sampl": 110, "ratio_unique_sampl": 110, "bag_of_tre": 110, "tree_idx": [110, 121], "tree_predict": [110, 121], "feed": 110, "bag_predict": 110, "unbroken": [110, 115], "whole": [110, 112, 117, 119, 121, 133, 139], "meta": 110, "wrap": [110, 133, 137, 165], "snippet": [110, 130, 136, 179], "smooth": [110, 132, 137], "bagged_tre": [110, 119], "bagged_trees_predict": 110, "opac": 110, "appreci": 110, "polynomialfeatur": [110, 129, 130, 132, 133, 135, 136, 139], "polynomial_regressor": 110, "1e": [110, 116, 131, 137, 151, 154], "intention": 110, "simpli": [110, 163], "regressor_predict": 110, "base_model_lin": 110, "bagging_predict": 110, "ylim": [110, 136, 142], "shade": 110, "randomizedsearchcv": [111, 117, 120, 149, 154, 156, 179], "penguins_regress": [112, 121, 128, 134, 138, 140, 158, 160, 161, 162, 164], "evenli": [112, 121], "170": [112, 121], "230": [112, 121], "newli": [112, 121], "conduct": [113, 122, 156], "learning_r": [113, 114, 117, 122, 123, 148, 150, 153, 154, 155, 179, 181], "slower": [113, 122, 135, 152], "offer": [113, 122, 154], "certainli": [113, 122], "n_iter_no_chang": [113, 122], "max_leaf_nod": [114, 117, 123, 148, 150, 152, 153, 154, 155, 161, 179, 181], "residu": [115, 117, 145, 154], "back": [115, 141, 142, 153, 157], "len_x": 115, "rand": [115, 139], "target_train_predict": 115, "target_test_predict": 115, "line_predict": 115, "lines_residu": 115, "edit": 115, "initi": [115, 152, 182], "tree_residu": 115, "target_train_predicted_residu": 115, "target_test_predicted_residu": 115, "manag": 115, "x_sampl": 115, "target_tru": 115, "target_true_residu": 115, "commit": [115, 145], "y_pred_first_tre": 115, "517": 115, "393": 115, "145": 115, "248": [115, 116, 146], "y_pred_first_and_second_tre": 115, "gradientboostingregressor": [115, 116, 122], "gradient_boost": [115, 116], "cv_results_gbdt": [115, 116], "446": 115, "919": 115, "982": 115, "007": [115, 116], "random_forest": [115, 119], "cv_results_rf": 115, "428": 115, "436": 115, "172": 115, "085": 115, "brute": [116, 138], "overcom": [116, 118, 132, 139], "benchmark": 116, "394": 116, "911": 116, "995": 116, "kbinsdiscret": [116, 132, 139], "n_bin": [116, 132, 139], "quantil": [116, 132], "data_tran": 116, "opt": [116, 133, 143, 146], "hostedtoolcach": [116, 133, 143, 146], "x64": [116, 133, 143, 146], "lib": [116, 133, 143, 146], "python3": [116, 133, 143, 146], "site": [116, 133, 143, 146], "_discret": 116, "userwarn": [116, 143, 146], "249": 116, "231": 116, "162": 116, "203": [116, 143, 146], "242": 116, "125": 116, "160": 116, "126": 116, "136": 116, "93": 116, "199": 116, "253": 116, "207": 116, "235": [116, 121, 164], "022": 116, "375": 116, "histogram_gradient_boost": 116, "cv_results_hgbdt": 116, "758": 116, "694": 116, "551": 116, "060": 116, "clariti": 117, "doubl": [117, 150, 151], "max_featur": [117, 119, 120], "grow": [117, 118, 161, 179], "uncorrel": 117, "constraint": [117, 133, 161], "min_samples_leaf": [117, 118, 153, 154, 161, 179], "branch": [117, 161], "promot": [117, 136], "altogeth": 117, "param_distribut": [117, 154, 156], "search_cv": 117, "n_iter": [117, 120, 149, 154, 156, 179], "param_": [117, 120, 123, 150, 154], "mean_test_error": [117, 120], "std_test_error": [117, 120], "cv_results_": [117, 120, 123, 150, 152, 154, 156, 179], "mean_test_scor": [117, 120, 123, 150, 152, 153, 154, 156, 179], "std_test_scor": [117, 120, 150, 152, 153, 154], "sort_valu": [117, 120, 150, 154, 156], "param_max_featur": [117, 120], "param_max_leaf_nod": 117, 
"param_min_samples_leaf": 117, "978155": 117, "564657": 117, "946351": 117, "544967": 117, "361681": 117, "392600": 117, "056250": 117, "529362": 117, "384198": 117, "653690": 117, "705012": 117, "557795": 117, "814857": 117, "973013": 117, "929450": 117, "916330": 117, "681239": 117, "385049": 117, "024546": 117, "781209": 117, "role": 117, "inter": 117, "refit": [117, 148, 152, 155], "overlook": 117, "loguniform": [117, 154], "param_max_it": 117, "param_learning_r": 117, "01864": 117, "059711": 117, "305289": 117, "047293": 117, "886194": 117, "294858": 117, "176656": 117, "620216": 117, "349642": 117, "297739": 117, "023759": 117, "825347": 117, "083745": 117, "104171": 117, "400591": 117, "215543": 117, "241217": 117, "301977": 117, "067503": 117, "780190": 117, "449252": 117, "05929": 117, "887688": 117, "400111": 117, "160519": 117, "337594": 117, "372942": 117, "125207": 117, "015150": 117, "814681": 117, "054511": 117, "191347": 117, "690748": 117, "248463": 117, "977311": 117, "593183": 117, "906226": 117, "187714": 117, "847621": 117, "061034": 117, "712506": 117, "707332": 117, "079415": 117, "447912": 117, "900105": 117, "0351": 117, "512730": 117, "998659": 117, "019923": 117, "645082": 117, "109315": 117, "039361": 117, "766862": 117, "042788": 117, "019351": 117, "341590": 117, "090469": 117, "01724": 117, "857731": 117, "137648": 117, "hgbt": 117, "hassl": 118, "354": 118, "087": [118, 135], "min_samples_split": [118, 161], "523": [118, 134], "107": 118, "bagging_regressor": 118, "642": 118, "083": 118, "decent": [118, 153, 154], "modif": 119, "inject": 119, "decorrel": 119, "categorical_encod": 119, "scores_tre": 119, "820": 119, "scores_bagged_tre": 119, "846": 119, "005": 119, "randomforestclassifi": [119, 125, 126], "scores_random_forest": 119, "004": 119, "disabl": 119, "sqrt": 119, "literatur": 119, "agnost": 119, "param": [120, 123, 132, 150, 153], "bootstrap_featur": 120, "estimator__ccp_alpha": 120, "estimator__criterion": 120, "estimator__max_depth": 120, "estimator__max_featur": 120, "estimator__max_leaf_nod": 120, "estimator__min_impurity_decreas": 120, "estimator__min_samples_leaf": 120, "estimator__min_samples_split": 120, "estimator__min_weight_fraction_leaf": 120, "estimator__monotonic_cst": 120, "estimator__random_st": 120, "estimator__splitt": 120, "max_sampl": 120, "oob_scor": 120, "verbos": [120, 151, 154, 156, 181], "warm_start": 120, "param_n_estim": 120, "param_max_sampl": 120, "param_estimator__max_depth": 120, "395300": 120, "200372": 120, "894554": 120, "132407": 120, "299403": 120, "007797": 120, "852611": 120, "845432": 120, "470246": 120, "165325": 120, "650233": 120, "928890": 120, "302352": 120, "151084": 120, "315767": 120, "849588": 120, "324889": 120, "077862": 120, "356723": 120, "042321": 120, "708955": 120, "161825": 120, "895668": 120, "964202": 120, "318367": 120, "415482": 120, "755615": 120, "456216": 120, "194714": 120, "829366": 120, "364199": 120, "091940": 120, "489622": 120, "884751": 120, "606614": 120, "405458": 120, "209962": 120, "954084": 120, "757686": 120, "885452": 120, "gram": [121, 129, 135, 138], "380": 121, "633": 121, "data_rang": 121, "forest_predict": 121, "n_estimators_": 122, "201": 122, "hist_gbdt": 123, "839": 123, "best_estimator_": 123, "528": 123, "447": 123, "576": 123, "290": 123, "414": 123, "index_column": 123, "inner_cv_result": 123, "cv_idx": 123, "search_cv_result": 123, "set_index": [123, 131, 137, 141, 147], "renam": [123, 150, 153, 154, 156, 179], "coincid": [123, 142], "bioinformat": [124, 127], 
"rna": [124, 127], "seq": [124, 127], "ten": [124, 127], "anova": [124, 125, 127], "feature_select": [124, 125, 126, 127], "selectkbest": [124, 125, 127], "f_classif": [124, 125, 127], "pre": [124, 127], "princip": 125, "make_classif": [125, 126], "n_inform": [125, 126], "n_redund": [125, 126], "univari": 125, "model_without_select": [125, 126], "model_with_select": [125, 126], "score_func": [125, 127], "cv_results_without_select": [125, 126], "incorpor": 125, "cv_results_with_select": [125, 126], "analyz": [125, 133, 179], "swap": 125, "swaplevel": [125, 126], "Of": 125, "scores_": 125, "percentil": 125, "alien": 125, "primari": 125, "feature_importances_": 126, "suffici": [126, 132], "class_sep": 126, "selectfrommodel": 126, "feature_selector": [126, 127], "overestim": 126, "100000": 127, "data_subset": 127, "940": 127, "succeed": 127, "legit": 127, "leak": 127, "data_train_subset": 127, "520": 127, "460": 127, "boilerpl": 127, "linear_model_flipper_mass": [128, 134, 140], "flipper_length": [128, 134, 140], "weight_flipper_length": [128, 134, 138, 140], "intercept_body_mass": [128, 134, 138, 140], "body_mass": [128, 134, 140], "flipper_length_rang": [128, 134, 138, 140], "goodness_fit_measur": [128, 134], "true_valu": [128, 134], "scalar": [128, 134], "model_idx": [128, 134], "x1": [129, 135, 141], "x2": [129, 135], "x3": [129, 135], "penguins_non_miss": [129, 135, 185], "181": [129, 135, 140], "186": [129, 135, 140], "195": [129, 135, 140], "193": [129, 135, 140, 154], "190": [129, 135, 140], "sign": [129, 135], "interaction_onli": [129, 130, 135, 136], "intermedi": [129, 135, 139, 152, 153], "moment": [130, 136, 157], "15024": [130, 136], "reload": [130, 136, 150, 154], "concern": [130, 136, 152], "named_transformers_": [130, 136], "get_feature_names_out": [130, 136], "metion": [131, 137], "infinit": [131, 137], "invers": [131, 137], "yourself": [131, 137], "penguins_train": [131, 137, 141], "penguins_test": [131, 137, 141], "vmin": [131, 132, 137, 150, 157, 163], "vmax": [131, 132, 137, 150, 157, 163], "plot_decision_boundari": [131, 132, 137], "logisticregression__c": [131, 137, 179, 181], "plot_method": [131, 132, 137], "pcolormesh": [131, 132, 137], "rdbu_r": [131, 137, 141], "contour": [131, 132, 137], "candid": [131, 137, 154, 156, 157], "cs": [131, 137], "1e6": [131, 137], "queri": [131, 133, 137], "kernel_approxim": [131, 132, 135, 137, 139], "nevertheless": 132, "moon": 132, "crescent": 132, "make_moon": 132, "newaxi": [132, 161], "data_moon": 132, "target_moon": 132, "gaussian": 132, "edg": 132, "concentr": 132, "make_gaussian_quantil": 132, "n_class": [132, 141, 163], "gauss": 132, "data_gauss": 132, "target_gauss": 132, "xor": 132, "OR": 132, "target_xor": 132, "logical_xor": 132, "int32": [132, 149, 156, 161], "data_xor": 132, "glanc": 132, "listedcolormap": 132, "constrained_layout": 132, "common_scatter_plot_param": 132, "middl": [132, 151], "set_ylabel": [132, 145, 163], "soft": [132, 141], "unsur": [132, 141], "attempt": [132, 133, 137], "leverag": 132, "spline": [132, 139], "onehot": 132, "kbinsdiscretizerkbinsdiscret": [132, 139], "segment": 132, "rectangular": 132, "drawn": 132, "n_knot": 132, "splinetransformersplinetransform": [132, 139], "favor": 132, "curvi": [132, 137], "knot": 132, "include_bia": [132, 133, 135, 136, 139], "polynomialfeaturespolynomialfeatur": [132, 133, 136, 139], "nystr\u00f6m": [132, 135], "coef0": [132, 141], "nystroemnystroem": [132, 139], "expans": [132, 139], "intract": 132, "radial": 132, "basi": 132, "furthemor": 132, "induct": 132, 
"rotation": 132, "everywher": [132, 137], "drawback": 132, "orign": 132, "despit": 132, "augment": [132, 133], "interplai": 132, "linear_regress": [133, 135, 138, 139, 164], "train_error": 133, "2e": 133, "85e": 133, "63e": 133, "69e": 133, "47e": 133, "fortun": 133, "feature_names_in_": 133, "model_first_fold": 133, "pipelineifittedpipelin": 133, "linearregressionlinearregress": [133, 139], "weights_linear_regress": 133, "symlog": 133, "homogen": 133, "choleski": 133, "_ridg": 133, "204": 133, "linalgwarn": 133, "rcond": 133, "59923e": 133, "linalg": 133, "xy": 133, "assume_a": 133, "po": [133, 142], "overwrite_a": 133, "59556e": 133, "59609e": 133, "11828e": 133, "06109e": 133, "60121e": 133, "61694e": 133, "59735e": 133, "59566e": 133, "72304e": 133, "60047e": 133, "59824e": 133, "59593e": 133, "59564e": 133, "5959e": 133, "59553e": 133, "59686e": 133, "60737e": 133, "5957e": 133, "60243e": 133, "90e": 133, "56e": 133, "55e": 133, "68e": 133, "weights_ridg": 133, "shrunk": 133, "worst": [133, 142], "saga": 133, "lsqr": 133, "re": [133, 177, 185], "resolv": 133, "omit": 133, "annual": 133, "neutral": [133, 163], "ahead": 133, "scaled_ridg": 133, "78e": 133, "21e": 133, "83e": 133, "17e": 133, "sweet": 133, "weights_ridge_scaled_data": 133, "ridge_large_alpha": 133, "1_000_000": 133, "unpredict": 133, "occurr": 133, "presenc": [133, 147], "divis": 133, "beforehand": 133, "store_cv_valu": 133, "12e": 133, "25e": 133, "50e": 133, "40e": 133, "mse_alpha": 133, "cv_values_": 133, "cv_alpha": 133, "000000e": 133, "841881e": 133, "347783e": 133, "321941e": 133, "837563e": 133, "343115e": 133, "747528e": 133, "831866e": 133, "336956e": 133, "310130e": 133, "824352e": 133, "328835e": 133, "053856e": 133, "814452e": 133, "318133e": 133, "274549e": 133, "319038e": 133, "337394e": 133, "328761e": 133, "324503e": 133, "338181e": 133, "722368e": 133, "328652e": 133, "338778e": 133, "564633e": 133, "331799e": 133, "339232e": 133, "334185e": 133, "339576e": 133, "yerr": 133, "yscale": 133, "salt": 133, "cook": 133, "best_alpha": 133, "11497569953977356": 133, "35111917342151344": 133, "1519911082952933": 133, "4641588833612782": 133, "08697490026177834": 133, "6135907273413176": 133, "stem": [133, 145], "summari": 133, "wasn": 133, "disproportion": 133, "15000": 134, "14000": 134, "predicted_body_mass": [134, 138, 140], "misleadingli": 134, "mse": [134, 139, 145, 147], "2764": 134, "854": 134, "338": 134, "573": 134, "041": 134, "337": 135, "071": 135, "868": 135, "poly_featur": 135, "linear_regression_interact": 135, "7077": 135, "3384": 135, "731": 135, "7347": 135, "3236": 135, "687": 135, "7858": 135, "3510": 135, "725": 135, "7083": 135, "3724": 135, "708": 135, "7467": 135, "3914": 135, "809": 135, "flipper_length_first_sampl": 135, "culmen_depth_first_sampl": 135, "301": 135, "790": 135, "340": 135, "spread": [135, 137, 153, 179], "enrich": 135, "nystroem_regress": [135, 139], "nystroem__n_compon": 135, "set_param": [135, 137, 151, 155, 181, 185], "299": 135, "874": 135, "4950": 135, "5050": 135, "footprint": 135, "scalabl": 135, "cv_results_lr": 136, "test_score_lr": 136, "79856704": 136, "79283521": 136, "79668305": 136, "80487305": 136, "80036855": 136, "79914005": 136, "79750205": 136, "7993448": 136, "80528256": 136, "80405405": 136, "causal": 136, "5_000": 136, "cv_results_complex_lr": 136, "test_score_complex_lr": 136, "85281474": 136, "85056295": 136, "84971335": 136, "8474611": 136, "84807535": 136, "84684685": 136, "85565111": 136, "8507371": 136, "85872236": 136, "8515561": 136, 
"workclass_infrequent_sklearn": 136, "education_infrequent_sklearn": 136, "status_": 136, "absent": 136, "widow": 136, "status_infrequent_sklearn": 136, "occupation_": 136, "adm": 136, "cleric": 136, "craft": 136, "repair": 136, "exec": 136, "manageri": 136, "handler": 136, "cleaner": 136, "tech": 136, "occupation_infrequent_sklearn": 136, "relationship_": 136, "race_": 136, "asian": 136, "pac": 136, "island": 136, "race_infrequent_sklearn": 136, "sex_": 136, "country_infrequent_sklearn": 136, "education_doctor": 136, "model_with_interact": 136, "cv_results_interact": 136, "test_score_interact": 136, "85383828": 136, "8527846": 136, "85298935": 136, "84930385": 136, "8503276": 136, "85462735": 136, "8523751": 136, "85176085": 136, "act": 136, "rapid": 136, "sigmoid": [137, 141], "nearli": 137, "steep": 137, "deduc": [137, 158], "lai": 137, "zone": 137, "weaker": 137, "light": 137, "lr_weight": 137, "perpendicular": [137, 157], "lowest": [137, 138, 145], "anywher": 137, "minor": 137, "blob": [137, 161], "frontier": 137, "conjunct": 137, "certainti": [137, 163], "linearregressionifittedlinearregress": 138, "68556640610011": 138, "5780": 138, "831358077066": 138, "mean_squared_error": [138, 139, 145], "inferred_body_mass": 138, "model_error": 138, "154546": 138, "313": 138, "occas": 139, "cubic": [139, 174], "said": [139, 145, 147], "data_max": 139, "data_min": 139, "len_data": 139, "sort": 139, "full_data": 139, "input_featur": 139, "reshap": [139, 145, 163], "fit_score_plot_regress": 139, "linearregressioninot": 139, "fittedlinearregress": 139, "global": 139, "data_expand": 139, "polynomial_expans": 139, "polynomial_regress": 139, "encourag": [139, 145], "svr": 139, "svrinot": 139, "fittedsvr": 139, "medium": 139, "10_000": [139, 157], "binned_regress": 139, "spline_regress": 139, "expand": 139, "3750": 140, "3800": 140, "3450": 140, "3650": 140, "2700": 140, "6300": 140, "heavier": [140, 158], "formula": 140, "shorter": 140, "13000": 140, "millimet": 140, "body_mass_180": 140, "body_mass_181": 140, "7200": 140, "7240": 140, "goe": [140, 142], "170mm": 140, "230mm": 140, "redefin": 140, "groupbi": 141, "inclin": 141, "x0": 141, "coef1": 141, "obliqu": [141, 157], "724791": 141, "096371": 141, "readi": 141, "barplot": 141, "horizont": [141, 161, 163], "hypothet": 141, "test_penguin": 141, "y_pred_proba": [141, 157], "1714923": 141, "8285077": 141, "y_proba_sampl": 141, "insist": 141, "overconfid": 141, "underconfid": 141, "asymptot": 141, "softmax": 141, "donor": 142, "ago": 142, "new_donor": 142, "That": [142, 147, 150, 152], "258": 142, "505": 142, "665": 142, "615": 142, "743": 142, "374": 142, "7780748663101604": 142, "accuracy_scor": 142, "778": 142, "finer": 142, "confusionmatrixdisplai": 142, "incorrect": 142, "erron": 142, "tp": 142, "tn": 142, "fn": 142, "fp": 142, "precision_scor": [142, 143, 146], "recall_scor": 142, "pos_label": [142, 143, 146], "688": 142, "124": 142, "mislabel": 142, "ratio": 142, "dummy_classifi": 142, "762": 142, "balanced_accuracy_scor": 142, "haven": 142, "target_proba_predict": 142, "271818": 142, "728182": 142, "451765": 142, "548235": 142, "445210": 142, "554790": 142, "441577": 142, "558423": 142, "870588": 142, "129412": 142, "equivalence_pred_proba": 142, "idxmax": 142, "graph": 142, "precisionrecalldisplai": 142, "tpr": 142, "ppv": 142, "ap": 142, "preval": 142, "ipykernel_5009": 142, "2781295333": 142, "__getitem__": 142, "ser": 142, "discrimin": 142, "roccurvedisplai": 142, "dash": 142, "plot_chance_level": 142, "pr": 142, "chance_level_kw": 142, 
"ambigu": [143, 146], "valueerror": [143, 146], "exc": [143, 146], "_valid": [143, 146], "1011": [143, 146], "recent": [143, 146], "_scorer": [143, 146], "137": [143, 145, 146], "__call__": [143, 146], "scorer": [143, 144, 146, 147], "_score": [143, 146], "345": [143, 146], "method_cal": [143, 146], "_cached_cal": [143, 146], "_get_response_valu": [143, 146], "_respons": [143, 146], "catch": [143, 146], "make_scor": [143, 146], "syntax": [144, 147], "iowa": 145, "intro": [145, 165], "996": 145, "902": 145, "2064": 145, "736": 145, "6872520581075443": 145, "dummy_regressor": 145, "608": 145, "disadvantag": 145, "median_absolute_error": 145, "mean_absolute_percentage_error": 145, "574": 145, "obsev": 145, "unobserv": 145, "extern": [145, 152], "cloud": 145, "against": 145, "exhibit": 145, "predictionerrordisplai": 145, "from_predict": 145, "y_true": 145, "actual_vs_predict": 145, "scatter_kwarg": 145, "residual_vs_predict": 145, "nwithout": 145, "banana": 145, "smile": 145, "clue": 145, "monoton": 145, "quantiletransform": [145, 185], "transformedtargetregressor": 145, "n_quantil": [145, 185], "900": 145, "output_distribut": 145, "model_transformed_target": 145, "ntransform": 145, "406": 145, "327": [145, 154], "disapprov": 145, "statistician": 145, "justifi": 145, "poissonregressor": 145, "tweedieregressor": 145, "reachabl": 145, "626": 146, "499": [146, 153, 154], "112": [146, 154], "166": 146, "00248313": 146, "00258851": 146, "00256777": 146, "002455": 146, "00250387": 146, "00245094": 146, "00240254": 146, "00262642": 146, "00248528": 146, "00246668": 146, "00242066": 146, "0023427": 146, "00231647": 146, "002321": 146, "00233245": 146, "00230694": 146, "00229311": 146, "00232673": 146, "0023334": 146, "00232935": 146, "test_accuraci": 146, "29333333": 146, "53333333": 146, "77333333": 146, "70666667": 146, "66216216": 146, "74324324": 146, "test_balanced_accuraci": 146, "42105263": 146, "48391813": 146, "66081871": 146, "3874269": 146, "43274854": 146, "44736842": 146, "55994152": 146, "73684211": 146, "49174407": 146, "50309598": 146, "794": 147, "892": 147, "225": 147, "test_r2": 147, "test_neg_mean_absolute_error": 147, "848721": 147, "256799": 147, "816374": 147, "084083": 147, "813513": 147, "113367": 147, "814138": 147, "448279": 147, "637473": 147, "370341": 147, "defaultdict": 147, "loss_funct": 147, "squared_error": 147, "absolute_error": 147, "loss_func": 147, "test_neg_mean_squared_error": 147, "243": 147, "923": 147, "344": [147, 154], "evolv": 147, "discontinu": 147, "surrog": 147, "substitut": 147, "log_loss": 147, "exhaust": [148, 155, 182], "cat_preprocessor": [148, 150, 152, 154, 155], "kneighborsregressor": [149, 156], "with_mean": [149, 156], "with_std": [149, 156], "dealt": 150, "ordinalencoderordinalencod": [150, 152, 154], "passthroughpassthrough": [150, 152, 154], "histgradientboostingclassifierhistgradientboostingclassifi": [150, 152, 154], "classifier__learning_r": [150, 152, 154, 155], "classifier__max_leaf_nod": [150, 152, 154, 155], "model_grid_search": [150, 152], "pipelinepipelin": [150, 152, 154], "charg": 150, "rapidli": 150, "ascend": [150, 154, 156], "mean_fit_tim": [150, 153], "std_fit_tim": [150, 153], "mean_score_tim": [150, 153], "std_score_tim": [150, 153], "param_classifier__learning_r": [150, 152, 153], "param_classifier__max_leaf_nod": [150, 152, 153], "split0_test_scor": [150, 153], "split1_test_scor": [150, 153], "rank_test_scor": [150, 152, 153, 154], "379503": 150, "037274": 150, "188996": 150, "010358": 150, "868912": 150, "867213": 150, 
"868063": 150, "000850": 150, "297463": 150, "004903": 150, "166850": 150, "000900": 150, "866783": 150, "866066": 150, "866425": 150, "000359": 150, "097578": 150, "001023": 150, "067673": 150, "004921": 150, "classifier__": 150, "854826": 150, "862899": 150, "858863": 150, "004036": 150, "122485": 150, "025753": 150, "074820": 150, "004542": 150, "853844": 150, "860934": 150, "857389": 150, "003545": 150, "208518": 150, "003173": 150, "111752": 150, "002506": 150, "852752": 150, "853781": 150, "853266": 150, "000515": 150, "shorten": 150, "param_classifier__": 150, "prefix": [150, 153], "column_result": [150, 154], "shorten_param": [150, 153, 154, 179], "__": [150, 151, 153, 154, 179], "rsplit": [150, 153, 154, 179], "851028": 150, "002707": 150, "843330": 150, "002917": 150, "817832": 150, "001124": 150, "797166": 150, "000715": 150, "618080": 150, "124277": 150, "549338": 150, "210599": 150, "283476": 150, "003775": 150, "heatmap": [150, 153], "pivoted_cv_result": 150, "pivot_t": 150, "ylgnbu": 150, "invert_yaxi": 150, "degrad": 150, "patholog": 150, "accordingli": 150, "hyperparamt": [150, 157], "recogniz": 151, "spell": 151, "classifier__c": [151, 179, 181], "hyperparameter_nam": 151, "preprocessor__copi": 151, "preprocessor__with_mean": 151, "preprocessor__with_std": 151, "classifier__class_weight": 151, "classifier__du": 151, "classifier__fit_intercept": 151, "classifier__intercept_sc": 151, "classifier__l1_ratio": 151, "classifier__max_it": 151, "classifier__multi_class": 151, "classifier__n_job": 151, "classifier__penalti": 151, "classifier__random_st": 151, "classifier__solv": 151, "classifier__tol": 151, "classifier__verbos": 151, "classifier__warm_start": 151, "001": [151, 154], "799": 151, "433321": 152, "068195": 152, "863241": 152, "429161": 152, "069802": 152, "860784": 152, "430613": 152, "069127": 152, "860360": [152, 153], "427582": 152, "070667": 152, "862408": [152, 153], "429843": 152, "068020": 152, "866912": 152, "863": 152, "embed": 152, "864195": 152, "000061": 152, "870910": 152, "869457": 152, "000819": 152, "866365": 152, "001822": 152, "877": 152, "schemat": 152, "green": [152, 157, 163], "rough": 152, "cv_test_scor": 152, "871": 152, "apprehend": 152, "cv_inner": 152, "cv_outer": 152, "cv_fold": 152, "estimator_in_fold": 152, "vote": 152, "randomized_search_result": [153, 154, 179], "param_classifier__l2_regular": 153, "param_classifier__max_bin": 153, "param_classifier__min_samples_leaf": 153, "split2_test_scor": 153, "split3_test_scor": 153, "split4_test_scor": 153, "540456": 153, "062725": 153, "052069": 153, "002661": 153, "467047": 153, "550075": 153, "classifier__l2_regular": [153, 154], "4670474863": 153, "856558": 153, "862271": 153, "857767": 153, "854491": 153, "856675": 153, "857552": 153, "002586": 153, "110536": 153, "033403": 153, "074142": 153, "002165": 153, "015449": 153, "001146": 153, "0154488709": 153, "758974": 153, "758941": 153, "758947": [153, 154], "000013": [153, 154], "323": [153, 157], "137484": 153, "053150": 153, "092993": 153, "029005": 153, "095093": 153, "004274": 153, "0950934559": 153, "783267": 153, "776413": 153, "779143": 153, "771341": 153, "010357": 153, "311": 153, "935108": 153, "202993": 153, "118105": 153, "023658": 153, "003621": 153, "001305": 153, "164": 153, "0036210968": 153, "255219": 153, "038301": 153, "056048": 153, "016736": 153, "000081": 153, "407382": 153, "1060737427": 153, "495": 153, "452411": 153, "023006": 153, "055563": 153, "000846": 153, "000075": 153, "364373": 153, "4813767874": 153, 
"858332": 153, "865001": 153, "862681": 153, "860770": 153, "861429": 153, "002258": 153, "133042": 153, "014456": 153, "078186": 153, "002199": 153, "065946": 153, "001222": 153, "0659455480": 153, "497": [153, 154], "911828": 153, "017167": 153, "076563": 153, "005130": 153, "460025": 153, "044408": 153, "4600250010": 153, "839907": 153, "849713": 153, "846847": 153, "846028": 153, "844390": 153, "845377": 153, "003234": 153, "140": 153, "498": 153, "168120": 153, "121819": 153, "061283": 153, "000760": 153, "000068": 153, "287904": 153, "227": 153, "146": 153, "7755366885": 153, "861881": 153, "859951": 153, "861862": 153, "862221": 153, "001623": 153, "823774": 153, "120686": 153, "060351": 153, "014958": 153, "445218": 153, "005112": 153, "4452178932": 153, "764569": 153, "765902": 153, "764947": 153, "765083": 153, "765281": 153, "000535": 153, "319": 153, "l2_regular": [153, 154, 179], "max_bin": [153, 154, 179], "score_bin": 153, "cut": [153, 161], "set_palett": 153, "ylgnbu_r": 153, "set_xscal": 153, "set_yscal": 153, "band": 153, "px": [153, 156, 179], "parallel_coordin": [153, 156, 179], "log10": [153, 179], "log2": [153, 179], "color_continuous_scal": [153, 156, 179], "yellow": [153, 163], "tick": 153, "invert": 153, "consecut": 154, "untract": 154, "situat": 154, "stochast": 154, "loguniform_int": 154, "__init__": 154, "_distribut": 154, "rv": 154, "arg": 154, "kwarg": 154, "processor": 154, "1e3": 154, "classifier__min_samples_leaf": 154, "classifier__max_bin": 154, "255": 154, "model_random_search": [154, 156], "962": 154, "histgradientboostingc": 154, "_distn_infrastructur": 154, "rv_continuous_frozen": 154, "0x7f677f4b86a0": 154, "0x7f677f4aab20": 154, "__main__": 154, "0x7f677f4bc3a0": 154, "0x7f677f4bc130": 154, "0x7f677f4aaaf0": 154, "randomizedsearchcvifittedrandomizedsearchcv": 154, "pprint": 154, "011954994705001769": 154, "04397988125308962": 154, "129": 154, "011955": 154, "04398": 154, "869018": 154, "002866": 154, "851199": 154, "845263": 154, "144": 154, "108": 154, "852693": 154, "003954": 154, "001512": 154, "196641": 154, "844067": 154, "002766": 154, "000012": 154, "009697": 154, "840354": 154, "003787": 154, "605": 154, "906164": 154, "083779": 154, "829735": 154, "004207": 154, "012292": 154, "011544": 154, "224": 154, "805547": 154, "001448": 154, "000707": 154, "001796": 154, "000117": 154, "001665": 154, "000032": 154, "001894": 154, "135": 154, "850228": 154, "399645": 154, "233": 154, "756572": 154, "001422": 154, "to_csv": 154, "208": 154, "011775": 154, "076653": 154, "871393": 154, "001588": 154, "343": 154, "000404": 154, "244503": 154, "229": 154, "871339": 154, "002741": 154, "994918": 154, "077047": 154, "192": 154, "870793": 154, "001993": 154, "328": 154, "036232": 154, "224702": 154, "236": 154, "869837": 154, "000808": 154, "733808": 154, "036786": 154, "241": 154, "869673": 154, "002417": 154, "232": 154, "000097": 154, "976823": 154, "448205": 154, "253714": 154, "000001": 154, "828574": 154, "000003": 154, "091079": 154, "000444": 154, "236325": 154, "344629": 154, "207156": 154, "357": 154, "000026": 154, "075318": 154, "241053": 154, "valuabl": 154, "allevi": 154, "best_scor": 155, "best_param": 155, "lr": 155, "mln": 155, "mean_scor": 155, "010": 155, "789": 155, "813": 155, "842": 155, "847": 155, "852": 155, "828": 155, "288": 155, "480": 155, "639": 155, "best_lr": 155, "best_mln": 155, "870": 155, "kneighborsregressor__n_neighbor": 156, "standardscaler__with_mean": 156, "standardscaler__with_std": 156, "welcom": 156, 
"column_name_map": 156, "param_kneighborsregressor__n_neighbor": 156, "param_standardscaler__with_mean": 156, "param_standardscaler__with_std": 156, "boolean": 156, "column_scal": 156, "687926": 156, "674812": 156, "668778": 156, "648317": 156, "629772": 156, "215": 156, "617295": 156, "464": 156, "567164": 156, "508809": 156, "486503": 156, "103390": 156, "061394": 156, "033122": 156, "017583": 156, "007987": 156, "002900": 156, "238830": 156, "tealros": 156, "kneighbor": 156, "mpl": [157, 163], "tab10_norm": [157, 163], "dbd": 157, "tab10": [157, 163], "norm": [157, 163], "plot_tre": [157, 159, 161, 162, 163], "class_nam": [157, 163], "impur": [157, 163], "inferior": 157, "superior": 157, "settabl": 157, "45mm": 157, "test_penguin_1": 157, "test_penguin_2": 157, "y_proba_class_0": 157, "adelie_proba": 157, "chinstrap_proba": 157, "gentoo_proba": 157, "037": 157, "disregard": 157, "test_penguin_3": 157, "63975155": 157, "32298137": 157, "03726708": 157, "fairli": 157, "palmer": 158, "anatom": 158, "depict": 158, "set_size_inch": 158, "superimpos": [160, 164], "data_clf_column": 161, "target_clf_column": 161, "data_clf": 161, "data_reg_column": 161, "target_reg_column": 161, "data_reg": 161, "fit_and_plot_classif": 161, "fit_and_plot_regress": 161, "tree_clf": 161, "tree_reg": 161, "adequ": 161, "asymmetri": 161, "make_blob": 161, "interlac": 161, "x_1": 161, "y_1": 161, "x_2": 161, "y_2": 161, "min_impurity_decreas": 161, "asymmetr": 161, "priori": 162, "3698": 162, "5032": 162, "tricki": 163, "spectr": 163, "purpl": 163, "xx": 163, "yy": 163, "meshgrid": 163, "xfull": 163, "proba": 163, "sharei": 163, "class_of_interest": 163, "imshow_handl": 163, "extent": 163, "colorbar": 163, "cax": 163, "binar": 163, "impress": 163, "target_predicted_linear_regress": 164, "target_predicted_tre": 164, "interpol": 164, "offset": 164, "175": 164, "shortest": 164, "longest": 164, "m3": [165, 178, 180], "m5": [165, 167, 168, 169, 176], "acknowledg": 165, "prune": 171, "children": 172, "increment": 173, "refin": 173, "na_filt": 177, "author": 177, "circular": 179, "budget": [179, 183], "badli": 179, "get_paramet": 181, "anim": 185, "param_valu": 185, "powertransform": 185, "all_preprocessor": 185, "cox": 185, "classifier__n_neighbor": 185, "forgot": 185}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"acknowledg": 0, "figur": 0, "attribut": [0, 2], "dataset": [1, 5, 73, 79, 80, 90, 103, 104, 105, 106, 107, 108, 152, 158], "descript": 1, "glossari": 2, "main": [2, 13, 22, 33, 39, 57, 71, 128, 134, 171, 183], "term": 2, "us": [2, 5, 10, 76, 85, 125, 126, 138, 150, 154], "thi": [2, 5], "cours": [2, 35], "api": 2, "classif": [2, 29, 141, 142, 157, 158, 167], "classifi": [2, 142], "cross": [2, 19, 20, 76, 85, 95, 96, 101], "valid": [2, 5, 19, 20, 58, 66, 76, 85, 95, 96, 101, 102], "data": [2, 5, 62, 64, 67, 73, 76, 79, 80, 81, 84, 85, 100, 128, 134], "matrix": [2, 142], "input": 2, "earli": 2, "stop": 2, "estim": [2, 101, 141, 163], "featur": [2, 31, 40, 81, 84, 87, 89, 108, 125, 126, 131, 132, 133, 137, 139, 165], "variabl": [2, 73, 84, 85, 87, 89, 108], "descriptor": 2, "covari": 2, "gener": [2, 102, 161], "perform": [2, 91, 165], "predict": [2, 5, 80, 84, 141, 142, 150, 152, 154, 163, 165], "statist": 2, "hyperparamet": [2, 11, 117, 119, 150, 151, 152, 153, 154, 161, 165, 168, 184], "infer": 2, "learn": [2, 5, 12, 21, 32, 35, 38, 49, 55, 58, 64, 69, 70, 80, 90, 95, 108, 110, 138, 140, 151, 165, 170, 182], "paramet": [2, 131, 133, 137, 161], "meta": 2, "model": [2, 5, 7, 8, 18, 37, 40, 44, 45, 
47, 64, 66, 76, 80, 81, 85, 90, 91, 108, 118, 126, 128, 132, 133, 134, 141, 150, 152, 154, 165, 166, 169], "overfit": [2, 56, 61, 102], "predictor": 2, "regress": [2, 30, 132, 133, 138, 139, 140, 145, 158, 162, 176], "regressor": 2, "regular": [2, 44, 47, 131, 133, 137], "penal": 2, "sampl": [2, 94, 95], "instanc": 2, "observ": 2, "supervis": 2, "target": [2, 80], "label": [2, 5], "annot": 2, "test": [2, 53, 79, 80, 101], "set": [2, 151], "train": [2, 53, 79, 80, 101], "fit": [2, 64, 80, 81, 85], "transform": 2, "underfit": [2, 56, 61, 102], "unsupervis": 2, "other": [2, 161], "notebook": [3, 73, 76, 79, 80, 139], "time": [3, 5, 12, 21, 32, 38, 55, 70, 170, 182], "tabl": [4, 165], "content": [4, 165], "conclud": [5, 6, 165], "remark": [5, 6, 165], "last": 5, "lesson": [5, 108], "goal": 5, "The": [5, 73, 76, 103, 104, 105, 106, 107, 142, 158, 165], "big": 5, "messag": [5, 132], "mooc": [5, 35], "1": [5, 72, 108], "machin": [5, 49, 165], "pipelin": [5, 69, 84, 87, 89, 90, 110, 165], "2": [5, 59, 108], "adapt": [5, 109], "complex": [5, 110], "3": [5, 108, 185], "specif": [5, 85], "go": [5, 13, 22, 33, 39, 57, 71, 171, 183], "further": [5, 13, 22, 33, 39, 57, 71, 171, 183], "more": [5, 85, 101], "about": [5, 119], "scikit": [5, 35, 64, 69, 80, 90, 110, 138, 140, 151], "we": [5, 90], "ar": 5, "an": [5, 84], "open": 5, "sourc": 5, "commun": 5, "topic": 5, "have": 5, "cover": 5, "studi": 5, "bring": 5, "valu": 5, "bigger": 5, "pictur": 5, "beyond": 5, "evalu": [5, 76, 84, 85, 142, 152, 165], "matter": 5, "small": 5, "part": 5, "problem": [5, 163], "most": 5, "technic": 5, "craft": 5, "all": 5, "how": 5, "choic": [5, 19], "output": 5, "bias": 5, "versu": [5, 51, 54], "causal": 5, "societ": 5, "impact": [5, 131, 137], "intuit": [7, 8, 37, 45, 47, 166, 169], "ensembl": [7, 8, 9, 10, 11, 118, 165], "bag": [7, 110], "boost": [8, 9, 109, 115, 116, 117], "base": [9, 84, 85, 166, 169], "method": [10, 11], "bootstrap": [10, 110], "tune": [11, 117, 133, 150, 152, 154, 165, 178, 180], "modul": [12, 21, 32, 38, 55, 70, 170, 182], "overview": [12, 21, 32, 38, 55, 70, 170, 182], "what": [12, 21, 32, 38, 55, 70, 170, 182], "you": [12, 21, 32, 38, 55, 70, 170, 182], "befor": [12, 21, 32, 38, 55, 70, 170, 182], "get": [12, 21, 32, 38, 55, 70, 151, 170, 182], "start": [12, 21, 32, 38, 55, 70, 170, 182], "object": [12, 21, 32, 38, 55, 70, 170, 182], "schedul": [12, 21, 32, 38, 55, 70, 170, 182], "take": [13, 22, 33, 39, 57, 71, 108, 132, 171, 183], "awai": [13, 22, 33, 39, 57, 71, 108, 132, 171, 183], "wrap": [13, 17, 22, 28, 33, 39, 46, 57, 59, 71, 72, 171, 177, 183, 185], "up": [13, 17, 22, 28, 33, 39, 46, 57, 59, 71, 72, 116, 171, 177, 183, 185], "To": [13, 22, 33, 39, 57, 71, 171, 183], "quiz": [14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 172, 173, 174, 175, 177, 179, 181, 185], "m6": [14, 15, 16, 111, 112, 113, 114, 120, 121, 122, 123], "01": [14, 23, 41, 48, 60, 63, 74, 75, 92, 93, 97, 98, 111, 120, 124, 127, 128, 134, 148, 155, 159, 163, 172, 181], "question": [14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 172, 173, 174, 175, 177, 179, 181, 185], "02": [15, 24, 42, 52, 65, 77, 82, 112, 121, 129, 135, 143, 146, 149, 156, 160, 164, 173, 179], "03": [16, 25, 43, 50, 68, 78, 83, 113, 122, 130, 136, 144, 147, 174], "6": 17, "compar": [18, 53, 91], "simpl": [18, 91], "baselin": [18, 91, 142], "nest": [20, 96], "m7": [23, 24, 25, 26, 27, 93, 98, 143, 144, 146, 147], "04": [26, 86, 88, 114, 123, 131, 137, 
175], "05": [27, 87, 89], "7": 28, "metric": [29, 30, 142], "caveat": 31, "select": [31, 84, 85, 125, 126, 165], "introduct": 35, "present": [35, 108], "welcom": 35, "follow": 35, "prerequisit": [35, 128, 134], "materi": 35, "social": 35, "network": 35, "linear": [37, 40, 44, 45, 47, 108, 131, 132, 133, 137, 138, 139, 140, 141, 165], "non": [40, 100, 131, 132, 137, 139], "engin": [40, 131, 132, 137, 139], "m4": [41, 42, 43, 128, 129, 130, 131, 134, 135, 136, 137], "4": 46, "intro": 48, "introduc": 49, "concept": [49, 165], "m2": [50, 52, 60, 92, 97], "bia": [51, 54], "varianc": [51, 54], "error": [53, 101], "trade": 54, "off": 54, "curv": [58, 95, 102], "tabular": 62, "explor": 62, "m1": [63, 65, 68, 74, 75, 77, 78, 82, 83, 86, 87, 88, 89], "numer": [64, 79, 81, 85, 87, 89], "handl": 67, "categor": [67, 84, 85, 87, 89], "visual": [69, 73, 90], "jupyt": [69, 90], "first": [73, 80, 90], "look": [73, 119], "our": [73, 84, 150, 152, 154], "load": [73, 79, 80, 90, 128, 134, 152], "adult": [73, 103], "censu": [73, 103], "column": [73, 85], "inspect": [73, 108], "creat": [73, 90, 161], "decis": [73, 115, 117, 131, 137, 157, 161, 162, 165, 167, 168, 176], "rule": 73, "hand": 73, "recap": [73, 76, 79, 80, 139], "exercis": [74, 75, 77, 78, 82, 83, 86, 87, 88, 89, 92, 93, 97, 98, 111, 112, 113, 114, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 134, 135, 136, 137, 143, 144, 146, 147, 148, 149, 155, 156, 159, 160, 163, 164], "solut": [75, 82, 83, 88, 89, 97, 98, 120, 121, 122, 123, 127, 134, 135, 136, 137, 146, 147, 155, 156, 163, 164], "prepar": [76, 81], "need": 76, "work": 79, "entir": 79, "identifi": [79, 84], "split": [79, 80], "panda": 80, "separ": 80, "make": 80, "preprocess": 81, "encod": [84, 87, 89], "type": [84, 85], "strategi": 84, "categori": [84, 87, 89], "ordin": 84, "nomin": 84, "without": [84, 140, 152], "assum": 84, "ani": 84, "order": 84, "choos": 84, "togeth": 85, "dispatch": 85, "processor": 85, "power": 85, "refer": [87, 89], "scale": [87, 89, 108, 133], "integ": [87, 89], "code": [87, 89], "One": [87, 89], "hot": [87, 89], "analysi": [89, 153, 184], "Then": 90, "final": 90, "score": 90, "group": 94, "effect": [95, 133, 161], "size": 95, "summari": [95, 101, 102, 132], "stratif": 99, "i": 100, "d": 100, "framework": 101, "vs": [101, 102], "stabil": 101, "detail": [101, 119], "regard": 101, "cross_valid": 101, "am": 104, "hous": [104, 107], "bike": 105, "ride": 105, "blood": 106, "transfus": 106, "california": 107, "import": [108, 161], "0": 108, "sign": 108, "coeffici": 108, "A": [108, 119], "surpris": 108, "associ": 108, "check": 108, "spars": 108, "lasso": 108, "randomforest": 108, "feature_importances_": 108, "permut": 108, "discuss": 108, "adaboost": 109, "resampl": 110, "aggreg": 110, "gradient": [115, 116, 117], "tree": [115, 117, 157, 161, 162, 165, 166, 167, 168, 169, 176], "gbdt": 115, "speed": 116, "random": [117, 119, 154], "forest": [117, 119], "histogram": 117, "introductori": 118, "exampl": 118, "default": 119, "benefit": 125, "limit": 126, "definit": [128, 134], "influenc": [131, 137], "c": [131, 137], "boundari": [131, 137], "weight": [131, 137], "logist": 132, "addit": 132, "interact": 132, "multi": [132, 163], "step": 132, "probabl": [141, 142, 163], "accuraci": 142, "confus": 142, "deriv": 142, "issu": 142, "class": [142, 163], "imbal": 142, "differ": 142, "threshold": 142, "m3": [148, 149, 155, 156, 179, 181], "grid": 150, "search": [150, 153, 154, 184], "With": 152, "result": [153, 184], "build": 157, "penguin": 158, "m5": [159, 160, 163, 164, 172, 173, 
174, 175], "helper": 161, "function": 161, "max_depth": 161, "best": 165, "appendix": 165, "interpret": 165, "5": 177, "autom": 178, "manual": 180}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file +Search.setIndex({"docnames": ["appendix/acknowledgement", "appendix/datasets_intro", "appendix/glossary", "appendix/notebook_timings", "appendix/toc_redirect", "concluding_remarks", "concluding_remarks_video", "ensemble/bagging_slides", "ensemble/boosting_slides", "ensemble/ensemble_boosting_index", "ensemble/ensemble_bootstrap_index", "ensemble/ensemble_hyperparameters_index", "ensemble/ensemble_module_intro", "ensemble/ensemble_module_take_away", "ensemble/ensemble_quiz_m6_01", "ensemble/ensemble_quiz_m6_02", "ensemble/ensemble_quiz_m6_03", "ensemble/ensemble_wrap_up_quiz", "evaluation/cross_validation_baseline_index", "evaluation/cross_validation_choices_index", "evaluation/cross_validation_nested_index", "evaluation/evaluation_module_intro", "evaluation/evaluation_module_take_away", "evaluation/evaluation_quiz_m7_01", "evaluation/evaluation_quiz_m7_02", "evaluation/evaluation_quiz_m7_03", "evaluation/evaluation_quiz_m7_04", "evaluation/evaluation_quiz_m7_05", "evaluation/evaluation_wrap_up_quiz", "evaluation/metrics_classification_index", "evaluation/metrics_regression_index", "feature_selection/feature_selection_limitation_index", "feature_selection/feature_selection_module_intro", "feature_selection/feature_selection_module_take_away", "feature_selection/feature_selection_quiz", "index", "interpretation/interpretation_quiz", "linear_models/linear_models_intuitions_index", "linear_models/linear_models_module_intro", "linear_models/linear_models_module_take_away", "linear_models/linear_models_non_linear_index", "linear_models/linear_models_quiz_m4_01", "linear_models/linear_models_quiz_m4_02", "linear_models/linear_models_quiz_m4_03", "linear_models/linear_models_regularization_index", "linear_models/linear_models_slides", "linear_models/linear_models_wrap_up_quiz", "linear_models/regularized_linear_models_slides", "ml_concepts/quiz_intro_01", "ml_concepts/slides", "overfit/bias_vs_variance_quiz_m2_03", "overfit/bias_vs_variance_slides", "overfit/learning_validation_curves_quiz_m2_02", "overfit/learning_validation_curves_slides", "overfit/overfit_bias_variance_index", "overfit/overfit_module_intro", "overfit/overfit_overfitting_underfitting_index", "overfit/overfit_take_away", "overfit/overfit_validation_learning_curves_index", "overfit/overfit_wrap_up_quiz", "overfit/overfitting_vs_under_fitting_quiz_m2_01", "overfit/overfitting_vs_under_fitting_slides", "predictive_modeling_pipeline/01_tabular_data_exploration_index", "predictive_modeling_pipeline/01_tabular_data_exploration_quiz_m1_01", "predictive_modeling_pipeline/02_numerical_pipeline_index", "predictive_modeling_pipeline/02_numerical_pipeline_quiz_m1_02", "predictive_modeling_pipeline/02_numerical_pipeline_video_cross_validation", "predictive_modeling_pipeline/03_categorical_pipeline_index", "predictive_modeling_pipeline/03_categorical_pipeline_quiz_m1_03", "predictive_modeling_pipeline/03_categorical_pipeline_visualization_video", "predictive_modeling_pipeline/predictive_modeling_module_intro", 
"predictive_modeling_pipeline/predictive_modeling_module_take_away", "predictive_modeling_pipeline/wrap_up_quiz", "python_scripts/01_tabular_data_exploration", "python_scripts/01_tabular_data_exploration_ex_01", "python_scripts/01_tabular_data_exploration_sol_01", "python_scripts/02_numerical_pipeline_cross_validation", "python_scripts/02_numerical_pipeline_ex_00", "python_scripts/02_numerical_pipeline_ex_01", "python_scripts/02_numerical_pipeline_hands_on", "python_scripts/02_numerical_pipeline_introduction", "python_scripts/02_numerical_pipeline_scaling", "python_scripts/02_numerical_pipeline_sol_00", "python_scripts/02_numerical_pipeline_sol_01", "python_scripts/03_categorical_pipeline", "python_scripts/03_categorical_pipeline_column_transformer", "python_scripts/03_categorical_pipeline_ex_01", "python_scripts/03_categorical_pipeline_ex_02", "python_scripts/03_categorical_pipeline_sol_01", "python_scripts/03_categorical_pipeline_sol_02", "python_scripts/03_categorical_pipeline_visualization", "python_scripts/cross_validation_baseline", "python_scripts/cross_validation_ex_01", "python_scripts/cross_validation_ex_02", "python_scripts/cross_validation_grouping", "python_scripts/cross_validation_learning_curve", "python_scripts/cross_validation_nested", "python_scripts/cross_validation_sol_01", "python_scripts/cross_validation_sol_02", "python_scripts/cross_validation_stratification", "python_scripts/cross_validation_time", "python_scripts/cross_validation_train_test", "python_scripts/cross_validation_validation_curve", "python_scripts/datasets_adult_census", "python_scripts/datasets_ames_housing", "python_scripts/datasets_bike_rides", "python_scripts/datasets_blood_transfusion", "python_scripts/datasets_california_housing", "python_scripts/dev_features_importance", "python_scripts/ensemble_adaboost", "python_scripts/ensemble_bagging", "python_scripts/ensemble_ex_01", "python_scripts/ensemble_ex_02", "python_scripts/ensemble_ex_03", "python_scripts/ensemble_ex_04", "python_scripts/ensemble_gradient_boosting", "python_scripts/ensemble_hist_gradient_boosting", "python_scripts/ensemble_hyperparameters", "python_scripts/ensemble_introduction", "python_scripts/ensemble_random_forest", "python_scripts/ensemble_sol_01", "python_scripts/ensemble_sol_02", "python_scripts/ensemble_sol_03", "python_scripts/ensemble_sol_04", "python_scripts/feature_selection_ex_01", "python_scripts/feature_selection_introduction", "python_scripts/feature_selection_limitation_model", "python_scripts/feature_selection_sol_01", "python_scripts/linear_models_ex_01", "python_scripts/linear_models_ex_02", "python_scripts/linear_models_ex_03", "python_scripts/linear_models_ex_04", "python_scripts/linear_models_feature_engineering_classification", "python_scripts/linear_models_regularization", "python_scripts/linear_models_sol_01", "python_scripts/linear_models_sol_02", "python_scripts/linear_models_sol_03", "python_scripts/linear_models_sol_04", "python_scripts/linear_regression_in_sklearn", "python_scripts/linear_regression_non_linear_link", "python_scripts/linear_regression_without_sklearn", "python_scripts/logistic_regression", "python_scripts/metrics_classification", "python_scripts/metrics_ex_01", "python_scripts/metrics_ex_02", "python_scripts/metrics_regression", "python_scripts/metrics_sol_01", "python_scripts/metrics_sol_02", "python_scripts/parameter_tuning_ex_02", "python_scripts/parameter_tuning_ex_03", "python_scripts/parameter_tuning_grid_search", "python_scripts/parameter_tuning_manual", 
"python_scripts/parameter_tuning_nested", "python_scripts/parameter_tuning_parallel_plot", "python_scripts/parameter_tuning_randomized_search", "python_scripts/parameter_tuning_sol_02", "python_scripts/parameter_tuning_sol_03", "python_scripts/trees_classification", "python_scripts/trees_dataset", "python_scripts/trees_ex_01", "python_scripts/trees_ex_02", "python_scripts/trees_hyperparameters", "python_scripts/trees_regression", "python_scripts/trees_sol_01", "python_scripts/trees_sol_02", "toc", "trees/slides", "trees/trees_classification_index", "trees/trees_hyperparameters_index", "trees/trees_intuitions_index", "trees/trees_module_intro", "trees/trees_module_take_away", "trees/trees_quiz_m5_01", "trees/trees_quiz_m5_02", "trees/trees_quiz_m5_03", "trees/trees_quiz_m5_04", "trees/trees_regression_index", "trees/trees_wrap_up_quiz", "tuning/parameter_tuning_automated_index", "tuning/parameter_tuning_automated_quiz_m3_02", "tuning/parameter_tuning_manual_index", "tuning/parameter_tuning_manual_quiz_m3_01", "tuning/parameter_tuning_module_intro", "tuning/parameter_tuning_module_take_away", "tuning/parameter_tuning_parallel_plot_video", "tuning/parameter_tuning_wrap_up_quiz"], "filenames": ["appendix/acknowledgement.md", "appendix/datasets_intro.md", "appendix/glossary.md", "appendix/notebook_timings.md", "appendix/toc_redirect.md", "concluding_remarks.md", "concluding_remarks_video.md", "ensemble/bagging_slides.md", "ensemble/boosting_slides.md", "ensemble/ensemble_boosting_index.md", "ensemble/ensemble_bootstrap_index.md", "ensemble/ensemble_hyperparameters_index.md", "ensemble/ensemble_module_intro.md", "ensemble/ensemble_module_take_away.md", "ensemble/ensemble_quiz_m6_01.md", "ensemble/ensemble_quiz_m6_02.md", "ensemble/ensemble_quiz_m6_03.md", "ensemble/ensemble_wrap_up_quiz.md", "evaluation/cross_validation_baseline_index.md", "evaluation/cross_validation_choices_index.md", "evaluation/cross_validation_nested_index.md", "evaluation/evaluation_module_intro.md", "evaluation/evaluation_module_take_away.md", "evaluation/evaluation_quiz_m7_01.md", "evaluation/evaluation_quiz_m7_02.md", "evaluation/evaluation_quiz_m7_03.md", "evaluation/evaluation_quiz_m7_04.md", "evaluation/evaluation_quiz_m7_05.md", "evaluation/evaluation_wrap_up_quiz.md", "evaluation/metrics_classification_index.md", "evaluation/metrics_regression_index.md", "feature_selection/feature_selection_limitation_index.md", "feature_selection/feature_selection_module_intro.md", "feature_selection/feature_selection_module_take_away.md", "feature_selection/feature_selection_quiz.md", "index.md", "interpretation/interpretation_quiz.md", "linear_models/linear_models_intuitions_index.md", "linear_models/linear_models_module_intro.md", "linear_models/linear_models_module_take_away.md", "linear_models/linear_models_non_linear_index.md", "linear_models/linear_models_quiz_m4_01.md", "linear_models/linear_models_quiz_m4_02.md", "linear_models/linear_models_quiz_m4_03.md", "linear_models/linear_models_regularization_index.md", "linear_models/linear_models_slides.md", "linear_models/linear_models_wrap_up_quiz.md", "linear_models/regularized_linear_models_slides.md", "ml_concepts/quiz_intro_01.md", "ml_concepts/slides.md", "overfit/bias_vs_variance_quiz_m2_03.md", "overfit/bias_vs_variance_slides.md", "overfit/learning_validation_curves_quiz_m2_02.md", "overfit/learning_validation_curves_slides.md", "overfit/overfit_bias_variance_index.md", "overfit/overfit_module_intro.md", "overfit/overfit_overfitting_underfitting_index.md", 
"overfit/overfit_take_away.md", "overfit/overfit_validation_learning_curves_index.md", "overfit/overfit_wrap_up_quiz.md", "overfit/overfitting_vs_under_fitting_quiz_m2_01.md", "overfit/overfitting_vs_under_fitting_slides.md", "predictive_modeling_pipeline/01_tabular_data_exploration_index.md", "predictive_modeling_pipeline/01_tabular_data_exploration_quiz_m1_01.md", "predictive_modeling_pipeline/02_numerical_pipeline_index.md", "predictive_modeling_pipeline/02_numerical_pipeline_quiz_m1_02.md", "predictive_modeling_pipeline/02_numerical_pipeline_video_cross_validation.md", "predictive_modeling_pipeline/03_categorical_pipeline_index.md", "predictive_modeling_pipeline/03_categorical_pipeline_quiz_m1_03.md", "predictive_modeling_pipeline/03_categorical_pipeline_visualization_video.md", "predictive_modeling_pipeline/predictive_modeling_module_intro.md", "predictive_modeling_pipeline/predictive_modeling_module_take_away.md", "predictive_modeling_pipeline/wrap_up_quiz.md", "python_scripts/01_tabular_data_exploration.py", "python_scripts/01_tabular_data_exploration_ex_01.py", "python_scripts/01_tabular_data_exploration_sol_01.py", "python_scripts/02_numerical_pipeline_cross_validation.py", "python_scripts/02_numerical_pipeline_ex_00.py", "python_scripts/02_numerical_pipeline_ex_01.py", "python_scripts/02_numerical_pipeline_hands_on.py", "python_scripts/02_numerical_pipeline_introduction.py", "python_scripts/02_numerical_pipeline_scaling.py", "python_scripts/02_numerical_pipeline_sol_00.py", "python_scripts/02_numerical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline.py", "python_scripts/03_categorical_pipeline_column_transformer.py", "python_scripts/03_categorical_pipeline_ex_01.py", "python_scripts/03_categorical_pipeline_ex_02.py", "python_scripts/03_categorical_pipeline_sol_01.py", "python_scripts/03_categorical_pipeline_sol_02.py", "python_scripts/03_categorical_pipeline_visualization.py", "python_scripts/cross_validation_baseline.py", "python_scripts/cross_validation_ex_01.py", "python_scripts/cross_validation_ex_02.py", "python_scripts/cross_validation_grouping.py", "python_scripts/cross_validation_learning_curve.py", "python_scripts/cross_validation_nested.py", "python_scripts/cross_validation_sol_01.py", "python_scripts/cross_validation_sol_02.py", "python_scripts/cross_validation_stratification.py", "python_scripts/cross_validation_time.py", "python_scripts/cross_validation_train_test.py", "python_scripts/cross_validation_validation_curve.py", "python_scripts/datasets_adult_census.py", "python_scripts/datasets_ames_housing.py", "python_scripts/datasets_bike_rides.py", "python_scripts/datasets_blood_transfusion.py", "python_scripts/datasets_california_housing.py", "python_scripts/dev_features_importance.py", "python_scripts/ensemble_adaboost.py", "python_scripts/ensemble_bagging.py", "python_scripts/ensemble_ex_01.py", "python_scripts/ensemble_ex_02.py", "python_scripts/ensemble_ex_03.py", "python_scripts/ensemble_ex_04.py", "python_scripts/ensemble_gradient_boosting.py", "python_scripts/ensemble_hist_gradient_boosting.py", "python_scripts/ensemble_hyperparameters.py", "python_scripts/ensemble_introduction.py", "python_scripts/ensemble_random_forest.py", "python_scripts/ensemble_sol_01.py", "python_scripts/ensemble_sol_02.py", "python_scripts/ensemble_sol_03.py", "python_scripts/ensemble_sol_04.py", "python_scripts/feature_selection_ex_01.py", "python_scripts/feature_selection_introduction.py", "python_scripts/feature_selection_limitation_model.py", 
"python_scripts/feature_selection_sol_01.py", "python_scripts/linear_models_ex_01.py", "python_scripts/linear_models_ex_02.py", "python_scripts/linear_models_ex_03.py", "python_scripts/linear_models_ex_04.py", "python_scripts/linear_models_feature_engineering_classification.py", "python_scripts/linear_models_regularization.py", "python_scripts/linear_models_sol_01.py", "python_scripts/linear_models_sol_02.py", "python_scripts/linear_models_sol_03.py", "python_scripts/linear_models_sol_04.py", "python_scripts/linear_regression_in_sklearn.py", "python_scripts/linear_regression_non_linear_link.py", "python_scripts/linear_regression_without_sklearn.py", "python_scripts/logistic_regression.py", "python_scripts/metrics_classification.py", "python_scripts/metrics_ex_01.py", "python_scripts/metrics_ex_02.py", "python_scripts/metrics_regression.py", "python_scripts/metrics_sol_01.py", "python_scripts/metrics_sol_02.py", "python_scripts/parameter_tuning_ex_02.py", "python_scripts/parameter_tuning_ex_03.py", "python_scripts/parameter_tuning_grid_search.py", "python_scripts/parameter_tuning_manual.py", "python_scripts/parameter_tuning_nested.py", "python_scripts/parameter_tuning_parallel_plot.py", "python_scripts/parameter_tuning_randomized_search.py", "python_scripts/parameter_tuning_sol_02.py", "python_scripts/parameter_tuning_sol_03.py", "python_scripts/trees_classification.py", "python_scripts/trees_dataset.py", "python_scripts/trees_ex_01.py", "python_scripts/trees_ex_02.py", "python_scripts/trees_hyperparameters.py", "python_scripts/trees_regression.py", "python_scripts/trees_sol_01.py", "python_scripts/trees_sol_02.py", "toc.md", "trees/slides.md", "trees/trees_classification_index.md", "trees/trees_hyperparameters_index.md", "trees/trees_intuitions_index.md", "trees/trees_module_intro.md", "trees/trees_module_take_away.md", "trees/trees_quiz_m5_01.md", "trees/trees_quiz_m5_02.md", "trees/trees_quiz_m5_03.md", "trees/trees_quiz_m5_04.md", "trees/trees_regression_index.md", "trees/trees_wrap_up_quiz.md", "tuning/parameter_tuning_automated_index.md", "tuning/parameter_tuning_automated_quiz_m3_02.md", "tuning/parameter_tuning_manual_index.md", "tuning/parameter_tuning_manual_quiz_m3_01.md", "tuning/parameter_tuning_module_intro.md", "tuning/parameter_tuning_module_take_away.md", "tuning/parameter_tuning_parallel_plot_video.md", "tuning/parameter_tuning_wrap_up_quiz.md"], "titles": ["Acknowledgement", "Datasets description", "Glossary", "Notebook timings", "Table of contents", "Concluding remarks", "\ud83c\udfa5 Concluding remarks", "\ud83c\udfa5 Intuitions on ensemble models: bagging", "\ud83c\udfa5 Intuitions on ensemble models: boosting", "Ensemble based on boosting", "Ensemble method using bootstrapping", "Hyperparameter tuning with ensemble methods", "Module overview", "Main take-away", "\u2705 Quiz M6.01", "\u2705 Quiz M6.02", "\u2705 Quiz M6.03", "\ud83c\udfc1 Wrap-up quiz 6", "Comparing a model with simple baselines", "Choice of cross-validation", "Nested cross-validation", "Module overview", "Main take-away", "\u2705 Quiz M7.01", "\u2705 Quiz M7.02", "\u2705 Quiz M7.03", "\u2705 Quiz M7.04", "\u2705 Quiz M7.05", "\ud83c\udfc1 Wrap-up quiz 7", "Classification metrics", "Regression metrics", "Caveats of feature selection", "Module overview", "Main take-away", "\u2705 Quiz", "Introduction", "\u2705 Quiz", "Intuitions on linear models", "Module overview", "Main take-away", "Non-linear feature engineering for linear models", "\u2705 Quiz M4.01", "\u2705 Quiz M4.02", "\u2705 Quiz M4.03", 
"Regularization in linear model", "\ud83c\udfa5 Intuitions on linear models", "\ud83c\udfc1 Wrap-up quiz 4", "\ud83c\udfa5 Intuitions on regularized linear models", "\u2705 Quiz Intro.01", "\ud83c\udfa5 Introducing machine-learning concepts", "\u2705 Quiz M2.03", "\ud83c\udfa5 Bias versus Variance", "\u2705 Quiz M2.02", "\ud83c\udfa5 Comparing train and test errors", "Bias versus variance trade-off", "Module overview", "Overfitting and underfitting", "Main take-away", "Validation and learning curves", "\ud83c\udfc1 Wrap-up quiz 2", "\u2705 Quiz M2.01", "\ud83c\udfa5 Overfitting and Underfitting", "Tabular data exploration", "\u2705 Quiz M1.01", "Fitting a scikit-learn model on numerical data", "\u2705 Quiz M1.02", "\ud83c\udfa5 Validation of a model", "Handling categorical data", "\u2705 Quiz M1.03", "\ud83c\udfa5 Visualizing scikit-learn pipelines in Jupyter", "Module overview", "Main take-away", "\ud83c\udfc1 Wrap-up quiz 1", "First look at our dataset", "\ud83d\udcdd Exercise M1.01", "\ud83d\udcc3 Solution for Exercise M1.01", "Model evaluation using cross-validation", "\ud83d\udcdd Exercise M1.02", "\ud83d\udcdd Exercise M1.03", "Working with numerical data", "First model with scikit-learn", "Preprocessing for numerical features", "\ud83d\udcc3 Solution for Exercise M1.02", "\ud83d\udcc3 Solution for Exercise M1.03", "Encoding of categorical variables", "Using numerical and categorical variables together", "\ud83d\udcdd Exercise M1.04", "\ud83d\udcdd Exercise M1.05", "\ud83d\udcc3 Solution for Exercise M1.04", "\ud83d\udcc3 Solution for Exercise M1.05", "Visualizing scikit-learn pipelines in Jupyter", "Comparing model performance with a simple baseline", "\ud83d\udcdd Exercise M2.01", "\ud83d\udcdd Exercise M7.01", "Sample grouping", "Effect of the sample size in cross-validation", "Nested cross-validation", "\ud83d\udcc3 Solution for Exercise M2.01", "\ud83d\udcc3 Solution for Exercise M7.01", "Stratification", "Non i.i.d. 
data", "Cross-validation framework", "Overfit-generalization-underfit", "The adult census dataset", "The Ames housing dataset", "The bike rides dataset", "The blood transfusion dataset", "The California housing dataset", "Feature importance", "Adaptive Boosting (AdaBoost)", "Bagging", "\ud83d\udcdd Exercise M6.01", "\ud83d\udcdd Exercise M6.02", "\ud83d\udcdd Exercise M6.03", "\ud83d\udcdd Exercise M6.04", "Gradient-boosting decision tree (GBDT)", "Speeding-up gradient-boosting", "Hyperparameter tuning", "Introductory example to ensemble models", "Random forests", "\ud83d\udcc3 Solution for Exercise M6.01", "\ud83d\udcc3 Solution for Exercise M6.02", "\ud83d\udcc3 Solution for Exercise M6.03", "\ud83d\udcc3 Solution for Exercise M6.04", "\ud83d\udcdd Exercise 01", "Benefits of using feature selection", "Limitation of selecting feature using a model", "\ud83d\udcc3 Solution for Exercise 01", "\ud83d\udcdd Exercise M4.01", "\ud83d\udcdd Exercise M4.02", "\ud83d\udcdd Exercise M4.03", "\ud83d\udcdd Exercise M4.04", "Non-linear feature engineering for Logistic Regression", "Regularization of linear regression model", "\ud83d\udcc3 Solution for Exercise M4.01", "\ud83d\udcc3 Solution for Exercise M4.02", "\ud83d\udcc3 Solution for Exercise M4.03", "\ud83d\udcc3 Solution for Exercise M4.04", "Linear regression using scikit-learn", "Non-linear feature engineering for Linear Regression", "Linear regression without scikit-learn", "Linear models for classification", "Classification", "\ud83d\udcdd Exercise M7.02", "\ud83d\udcdd Exercise M7.03", "Regression", "\ud83d\udcc3 Solution for Exercise M7.02", "\ud83d\udcc3 Solution for Exercise M7.03", "\ud83d\udcdd Exercise M3.01", "\ud83d\udcdd Exercise M3.02", "Hyperparameter tuning by grid-search", "Set and get hyperparameters in scikit-learn", "Evaluation and hyperparameter tuning", "Analysis of hyperparameter search results", "Hyperparameter tuning by randomized-search", "\ud83d\udcc3 Solution for Exercise M3.01", "\ud83d\udcc3 Solution for Exercise M3.02", "Build a classification decision tree", "The penguins datasets", "\ud83d\udcdd Exercise M5.01", "\ud83d\udcdd Exercise M5.02", "Importance of decision tree hyperparameters on generalization", "Decision tree for regression", "\ud83d\udcc3 Solution for Exercise M5.01", "\ud83d\udcc3 Solution for Exercise M5.02", "Table of contents", "\ud83c\udfa5 Intuitions on tree-based models", "Decision tree in classification", "Hyperparameters of decision tree", "Intuitions on tree-based models", "Module overview", "Main take-away", "\u2705 Quiz M5.01", "\u2705 Quiz M5.02", "\u2705 Quiz M5.03", "\u2705 Quiz M5.04", "Decision tree in regression", "\ud83c\udfc1 Wrap-up quiz 5", "Automated tuning", "\u2705 Quiz M3.02", "Manual tuning", "\u2705 Quiz M3.01", "Module overview", "Main take-away", "\ud83c\udfa5 Analysis of hyperparameter search results", "\ud83c\udfc1 Wrap-up quiz 3"], "terms": {"The": [0, 1, 2, 12, 17, 21, 32, 35, 36, 38, 41, 43, 46, 48, 55, 57, 59, 65, 70, 72, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 108, 109, 110, 111, 112, 113, 114, 116, 117, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 143, 144, 145, 146, 147, 148, 149, 150, 152, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 170, 172, 174, 177, 182, 183, 185], "diagram": [0, 2, 81], "present": [0, 7, 8, 12, 21, 22, 28, 33, 38, 45, 47, 49, 51, 53, 55, 61, 66, 70, 72, 73, 80, 84, 91, 95, 99, 100, 102, 103, 104, 
105, 106, 107, 109, 110, 115, 116, 118, 119, 126, 138, 140, 142, 143, 145, 146, 152, 154, 158, 162, 166, 170, 171, 172], "api": [0, 5, 71, 77, 80, 82, 85, 142], "design": [0, 2, 5, 35, 79, 104, 139, 145], "modul": [0, 2, 13, 17, 22, 33, 35, 39, 46, 57, 71, 73, 79, 85, 103, 107, 131, 132, 135, 137, 142, 150, 151, 152, 159, 163, 165, 171, 183], "predict": [0, 12, 15, 17, 21, 22, 23, 24, 28, 32, 35, 38, 39, 41, 42, 46, 48, 50, 55, 57, 59, 60, 65, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 85, 86, 88, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 118, 119, 120, 121, 122, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 145, 147, 149, 151, 153, 156, 157, 158, 159, 160, 161, 162, 164, 170, 174, 177, 181, 182, 185], "model": [0, 9, 10, 12, 13, 14, 16, 17, 21, 22, 23, 24, 25, 26, 27, 28, 31, 32, 33, 34, 35, 36, 38, 39, 41, 42, 43, 46, 50, 52, 55, 57, 59, 60, 63, 65, 68, 70, 71, 72, 73, 75, 77, 78, 79, 82, 83, 84, 86, 87, 88, 89, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 110, 111, 113, 114, 116, 117, 119, 120, 122, 123, 124, 125, 127, 129, 130, 131, 135, 136, 137, 138, 139, 140, 142, 143, 144, 145, 146, 147, 148, 149, 151, 153, 155, 156, 157, 160, 161, 162, 163, 164, 170, 171, 177, 179, 181, 182, 183, 185], "pipelin": [0, 2, 12, 21, 32, 33, 35, 38, 42, 46, 49, 55, 59, 65, 67, 70, 71, 72, 76, 81, 85, 86, 88, 92, 93, 94, 96, 97, 98, 99, 104, 107, 108, 116, 119, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 139, 141, 148, 149, 150, 151, 152, 154, 155, 156, 170, 179, 181, 182, 185], "us": [0, 13, 16, 17, 21, 22, 23, 25, 26, 27, 28, 31, 32, 33, 34, 35, 36, 37, 38, 39, 41, 42, 43, 46, 48, 55, 59, 63, 64, 65, 67, 68, 70, 71, 72, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 151, 152, 153, 155, 156, 157, 158, 159, 160, 162, 163, 164, 165, 172, 173, 177, 179, 182, 185], "follow": [0, 2, 12, 13, 16, 17, 21, 22, 26, 28, 32, 33, 39, 41, 43, 46, 48, 55, 57, 59, 65, 68, 70, 71, 72, 73, 77, 80, 81, 82, 84, 85, 92, 93, 94, 96, 97, 98, 100, 108, 110, 114, 115, 117, 119, 123, 130, 131, 132, 133, 136, 137, 138, 139, 140, 141, 142, 145, 148, 149, 150, 151, 152, 154, 155, 156, 158, 162, 170, 171, 177, 179, 182, 183, 185], "paramet": [0, 5, 14, 17, 25, 26, 27, 28, 36, 39, 41, 43, 46, 52, 55, 57, 59, 65, 72, 76, 77, 78, 79, 81, 82, 83, 84, 86, 88, 92, 96, 97, 98, 101, 102, 108, 109, 110, 111, 113, 114, 117, 118, 119, 120, 122, 123, 126, 129, 132, 135, 138, 140, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 159, 163, 170, 177, 179, 181, 182, 185], "free": [0, 5, 35, 84, 114, 117, 123, 133, 185], "icon": [0, 35], "licens": [0, 35], "under": [0, 23, 28, 35, 39, 73, 92, 97, 102, 118, 132, 139, 142, 145, 161, 170], "cc": [0, 35], "BY": [0, 35], "3": [0, 3, 17, 28, 41, 46, 55, 59, 72, 73, 75, 79, 80, 81, 82, 83, 84, 90, 91, 92, 94, 96, 97, 98, 99, 101, 104, 105, 106, 107, 109, 110, 114, 115, 116, 117, 118, 120, 121, 123, 125, 126, 129, 130, 132, 133, 135, 136, 139, 140, 141, 142, 143, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 160, 161, 162, 163, 164, 165, 170, 177, 182], "0": [0, 2, 3, 17, 28, 36, 41, 43, 46, 65, 72, 73, 75, 76, 79, 80, 81, 82, 83, 
84, 85, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 161, 162, 163, 164, 177, 179, 185], "sourc": [0, 117], "set": [0, 5, 17, 25, 28, 34, 36, 38, 39, 41, 43, 46, 50, 52, 55, 57, 59, 60, 63, 65, 72, 73, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 124, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 148, 149, 150, 152, 154, 155, 156, 157, 158, 159, 161, 162, 163, 165, 175, 177, 179, 180, 181, 182, 185], "gear": 0, "svg": 0, "vector": [0, 41, 42, 92, 97, 99, 101, 115, 124, 127, 128, 132, 134, 139, 142], "cc0": 0, "close": [0, 2, 28, 43, 48, 73, 81, 96, 99, 100, 101, 107, 108, 115, 122, 123, 132, 133, 137, 138, 139, 141, 145, 150, 152, 154, 157], "mit": 0, "penguin": [1, 17, 74, 75, 109, 112, 121, 128, 129, 131, 134, 135, 137, 138, 140, 141, 157, 159, 160, 162, 163, 164, 165, 185], "adult": [1, 63, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 93, 98, 119, 130, 136, 148, 150, 151, 152, 154, 155, 165], "censu": [1, 63, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 93, 98, 101, 107, 119, 130, 136, 148, 150, 151, 152, 154, 155, 165], "california": [1, 91, 101, 104, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 165], "hous": [1, 2, 48, 72, 90, 91, 95, 101, 102, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 133, 144, 145, 147, 165], "am": [1, 133, 144, 145, 147, 165], "blood": [1, 92, 97, 142, 143, 146, 165], "transfus": [1, 92, 97, 142, 143, 146, 165], "bike": [1, 28, 165], "ride": [1, 28, 165], "aim": [2, 35, 79, 92, 97, 100, 101, 109, 111, 112, 113, 114, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 134, 135, 140, 142, 143, 146, 159, 160, 163, 164], "describ": [2, 68, 79, 80, 81, 91, 107, 185], "For": [2, 5, 35, 41, 52, 57, 70, 73, 76, 79, 80, 81, 84, 85, 90, 91, 94, 96, 98, 101, 102, 105, 107, 108, 112, 113, 115, 117, 119, 121, 122, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 139, 140, 141, 142, 145, 147, 150, 151, 152, 154, 157, 161, 163, 164, 173, 177, 182], "you": [2, 5, 13, 17, 22, 28, 33, 35, 39, 46, 49, 57, 59, 65, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 95, 97, 98, 99, 101, 102, 103, 105, 107, 108, 109, 111, 112, 113, 114, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 171, 175, 177, 179, 181, 183, 185], "don": [2, 5, 77, 82, 92, 97, 101, 106, 124, 127, 133, 150], "t": [2, 5, 77, 82, 84, 92, 94, 97, 100, 101, 103, 106, 108, 116, 123, 124, 127, 133, 142, 145, 146, 150], "find": [2, 5, 48, 57, 73, 79, 80, 81, 84, 92, 93, 96, 97, 98, 100, 102, 111, 112, 114, 120, 121, 123, 126, 127, 129, 131, 132, 133, 135, 137, 138, 148, 149, 150, 152, 154, 155, 156, 157, 159, 160, 163, 164, 170, 179], "we": [2, 12, 13, 17, 21, 22, 24, 27, 28, 32, 33, 36, 38, 39, 41, 46, 48, 52, 55, 57, 59, 63, 65, 68, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 
110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 170, 171, 177, 179, 181, 182, 185], "ad": [2, 17, 39, 46, 52, 92, 95, 97, 108, 113, 117, 119, 122, 139, 150, 154], "bottom": [2, 73, 142], "page": [2, 35, 79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "acronym": [2, 100], "stand": [2, 80, 110, 179], "applic": [2, 5, 80, 94, 100, 142, 145, 161], "program": [2, 17, 28, 35, 46, 59, 70, 72, 80, 94, 124, 127, 177, 185], "interfac": [2, 80], "It": [2, 5, 28, 35, 57, 73, 77, 80, 81, 82, 84, 85, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 115, 116, 118, 119, 123, 124, 127, 132, 133, 137, 140, 142, 143, 145, 146, 147, 152, 153, 154, 157, 161, 162, 163], "can": [2, 5, 13, 14, 17, 21, 22, 27, 28, 33, 35, 38, 39, 43, 46, 52, 55, 57, 59, 65, 68, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 164, 171, 172, 174, 177, 179, 182, 183, 185], "have": [2, 24, 28, 41, 46, 48, 52, 57, 59, 65, 68, 73, 76, 78, 79, 80, 81, 83, 84, 88, 91, 95, 96, 99, 100, 101, 104, 105, 106, 107, 108, 109, 112, 115, 116, 117, 118, 119, 121, 122, 124, 125, 127, 129, 132, 133, 134, 135, 136, 139, 140, 141, 142, 145, 148, 150, 151, 152, 153, 154, 155, 156, 157, 158, 161, 162, 163, 170, 177, 179, 182, 183, 185], "slightli": [2, 15, 73, 82, 84, 85, 101, 105, 109, 110, 117, 118, 119, 122, 137], "differ": [2, 5, 14, 15, 17, 21, 24, 28, 36, 41, 43, 46, 52, 57, 59, 63, 65, 68, 72, 73, 74, 75, 76, 80, 81, 84, 85, 89, 91, 92, 94, 95, 96, 97, 99, 100, 101, 103, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 127, 128, 131, 132, 133, 134, 135, 137, 138, 139, 143, 145, 146, 147, 149, 151, 152, 153, 154, 156, 157, 158, 161, 162, 177, 185], "mean": [2, 5, 27, 28, 46, 59, 72, 73, 76, 77, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 127, 129, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 144, 145, 146, 147, 150, 151, 152, 154, 155, 156, 157, 159, 162, 163, 174, 185], "context": [2, 5, 14, 100, 141, 142], "some": [2, 5, 17, 21, 22, 28, 32, 35, 46, 59, 63, 65, 70, 72, 73, 74, 75, 76, 78, 79, 80, 81, 83, 84, 86, 88, 90, 94, 96, 98, 99, 100, 101, 102, 103, 104, 105, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 126, 127, 132, 133, 136, 137, 139, 140, 141, 145, 147, 150, 152, 153, 154, 156, 157, 161, 162, 164, 177, 185], "case": [2, 28, 32, 48, 73, 76, 79, 81, 84, 85, 87, 89, 90, 91, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 108, 115, 117, 119, 124, 127, 129, 132, 133, 135, 136, 137, 140, 141, 142, 143, 145, 146, 147, 149, 151, 152, 154, 156, 157], "an": [2, 15, 21, 22, 25, 27, 34, 35, 39, 41, 42, 46, 48, 50, 52, 55, 57, 68, 70, 71, 72, 73, 76, 80, 81, 83, 85, 86, 87, 88, 89, 90, 92, 94, 95, 96, 97, 99, 100, 101, 105, 106, 107, 108, 110, 113, 115, 116, 117, 118, 119, 122, 124, 125, 127, 129, 131, 132, 133, 134, 135, 137, 
138, 139, 141, 142, 143, 145, 146, 147, 148, 150, 151, 152, 155, 156, 157, 161, 162, 163, 170, 173, 177, 179, 181, 182, 183, 185], "onlin": [2, 91, 93, 98], "servic": [2, 35, 85, 136], "access": [2, 14, 28, 35, 46, 57, 77, 82, 92, 97, 101, 108, 110, 112, 121, 131, 133, 137, 141, 142, 185], "remot": 2, "In": [2, 5, 14, 21, 22, 28, 33, 35, 36, 38, 39, 41, 43, 46, 57, 63, 71, 73, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 88, 90, 91, 92, 93, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 125, 126, 127, 128, 129, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 161, 162, 163, 164, 171, 177, 179, 182], "both": [2, 14, 15, 17, 21, 28, 38, 39, 46, 52, 63, 72, 73, 75, 80, 81, 84, 85, 86, 87, 88, 89, 93, 94, 95, 96, 97, 98, 99, 102, 104, 108, 109, 113, 115, 116, 119, 122, 124, 125, 127, 128, 130, 132, 133, 134, 136, 137, 139, 141, 142, 143, 145, 146, 150, 152, 156, 157, 158, 160, 161, 164, 170, 171, 177, 179, 185], "itself": [2, 17, 41, 81, 94, 96, 97, 101, 103, 106, 110, 145, 152, 182], "technic": [2, 12, 21, 32, 35, 38, 55, 70, 80, 170, 182], "specif": [2, 12, 21, 22, 36, 63, 80, 81, 86, 87, 88, 89, 94, 101, 102, 105, 107, 108, 109, 115, 116, 119, 125, 133, 142, 150, 151, 153, 157, 161, 162, 177], "peopl": [2, 5, 73, 94, 101, 107, 108, 142], "who": [2, 5, 85, 142], "write": [2, 35, 74, 77, 78, 86, 87, 89, 92, 93, 94, 111, 112, 113, 114, 124, 128, 129, 130, 131, 139, 143, 144, 148, 149, 155, 159, 160], "client": 2, "connect": 2, "offlin": 2, "librari": [2, 35, 70, 73, 156], "scikit": [2, 12, 13, 21, 22, 23, 26, 27, 32, 33, 36, 37, 38, 39, 41, 43, 55, 57, 59, 63, 65, 67, 70, 71, 76, 77, 79, 81, 82, 84, 85, 86, 88, 93, 98, 99, 101, 107, 109, 112, 114, 116, 119, 121, 123, 124, 127, 132, 133, 139, 141, 142, 143, 145, 146, 147, 150, 152, 159, 163, 165, 170, 171, 173, 174, 180, 182, 183, 185], "list": [2, 28, 35, 46, 59, 84, 87, 89, 99, 103, 108, 110, 111, 113, 120, 122, 133, 143, 144, 146, 147, 151, 185], "all": [2, 14, 15, 16, 17, 23, 25, 26, 27, 28, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 76, 77, 79, 80, 81, 82, 84, 87, 89, 91, 93, 94, 95, 96, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 118, 119, 125, 130, 132, 133, 134, 136, 137, 139, 140, 142, 145, 147, 149, 150, 151, 152, 153, 156, 157, 160, 162, 163, 164, 172, 173, 175, 177, 179, 181, 185], "public": 2, "function": [2, 5, 17, 28, 35, 42, 50, 57, 59, 65, 72, 76, 79, 80, 81, 84, 93, 98, 101, 102, 104, 107, 108, 109, 110, 114, 115, 123, 124, 127, 128, 131, 132, 134, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 150, 152, 153, 157, 159, 162, 163, 174, 185], "class": [2, 14, 24, 26, 41, 59, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 97, 98, 99, 103, 104, 105, 106, 107, 108, 109, 110, 116, 119, 130, 131, 132, 133, 136, 137, 139, 140, 141, 143, 146, 147, 148, 150, 151, 152, 154, 155, 157, 159, 161, 173, 185], "method": [2, 3, 5, 13, 41, 59, 72, 73, 74, 75, 79, 80, 81, 85, 94, 100, 108, 111, 115, 118, 119, 120, 126, 129, 132, 133, 135, 139, 141, 142, 145, 151, 152, 154, 165, 185], "along": [2, 73, 76, 84, 86, 88, 104, 128, 134, 145, 157], "document": [2, 3, 5, 14, 59, 77, 79, 80, 81, 82, 84, 85, 86, 88, 90, 91, 93, 94, 96, 98, 101, 109, 132, 133, 136, 138, 139, 142, 143, 144, 146, 147, 150, 151, 152, 154, 157, 163, 164], "via": [2, 28, 35, 76, 84, 96, 102, 107, 114, 115, 116, 118, 119, 123, 125, 141, 150, 151, 
161], "docstr": [2, 145], "brows": 2, "http": [2, 5, 35, 73, 80, 94, 101, 107], "org": [2, 73, 79, 80, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "stabl": [2, 108, 110, 133, 177, 185], "html": [2, 79, 81, 82, 85, 90, 96, 101, 107, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "try": [2, 5, 28, 46, 73, 79, 81, 82, 85, 86, 88, 90, 96, 101, 105, 109, 110, 115, 117, 119, 129, 131, 132, 133, 135, 136, 137, 138, 139, 142, 143, 145, 146, 147, 148, 150, 152, 154, 155, 156, 157, 163, 164, 177, 182], "adopt": [2, 132, 137], "simpl": [2, 5, 46, 73, 78, 83, 92, 97, 104, 109, 110, 118, 119, 131, 133, 137, 139, 140, 141, 151, 157, 163, 165], "convent": [2, 73, 80, 81, 139], "limit": [2, 17, 28, 31, 33, 41, 55, 57, 60, 79, 85, 101, 102, 103, 110, 132, 133, 139, 145, 154, 158, 160, 164, 165], "minimum": [2, 117, 137, 142, 154, 160, 161, 162, 164], "number": [2, 5, 15, 16, 17, 28, 36, 41, 43, 46, 48, 52, 59, 68, 72, 73, 76, 77, 79, 80, 81, 82, 84, 85, 86, 88, 92, 94, 95, 97, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 113, 114, 115, 116, 117, 119, 122, 123, 125, 126, 127, 129, 130, 132, 133, 134, 135, 136, 137, 139, 142, 144, 145, 147, 149, 150, 152, 153, 154, 156, 157, 161, 162, 172, 177, 179, 183, 185], "object": [2, 5, 28, 46, 57, 77, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 101, 103, 104, 105, 106, 119, 133, 136, 141, 142, 143, 146, 148, 150, 151, 152, 154, 155, 156, 157, 162, 177], "must": [2, 5, 43, 108, 130, 133, 136, 152, 185], "implement": [2, 89, 99, 109, 110, 116, 138, 141, 143, 146, 150, 153], "furthermor": [2, 46, 101, 132, 133, 135, 141], "tri": [2, 5, 65, 105, 109, 133, 137], "consist": [2, 39, 46, 72, 76, 80, 101, 129, 130, 135, 136, 139, 142, 147], "name": [2, 14, 28, 59, 73, 75, 79, 80, 81, 82, 83, 84, 85, 91, 92, 97, 98, 99, 101, 103, 104, 105, 106, 107, 108, 110, 115, 117, 120, 123, 129, 130, 131, 133, 135, 136, 137, 140, 141, 142, 150, 151, 153, 154, 181, 185], "same": [2, 17, 25, 28, 36, 39, 43, 46, 65, 73, 78, 79, 80, 81, 83, 84, 85, 89, 94, 95, 96, 98, 99, 101, 102, 103, 104, 108, 109, 110, 115, 116, 125, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 140, 142, 145, 147, 153, 154, 162, 163, 177, 185], "categori": [2, 68, 72, 73, 79, 85, 86, 88, 104, 106, 119, 130, 133, 136, 150, 158], "e": [2, 5, 17, 28, 46, 59, 63, 65, 68, 70, 71, 72, 73, 79, 80, 81, 83, 84, 85, 88, 94, 95, 96, 99, 100, 101, 103, 108, 115, 116, 117, 118, 125, 129, 133, 135, 137, 138, 141, 142, 145, 147, 148, 150, 154, 155, 156, 157, 177, 179, 181, 185], "g": [2, 5, 17, 28, 72, 73, 84, 85, 94, 96, 100, 108, 112, 117, 118, 121, 125, 128, 129, 133, 134, 135, 137, 138, 140, 141, 148, 150, 154, 155, 156, 158, 160, 161, 162, 164, 177, 185], "expos": [2, 81, 84, 119, 142], "fit_transform": [2, 65, 81, 84, 85, 103, 104, 116, 127, 139], "accept": [2, 84, 101], "similar": [2, 43, 46, 59, 65, 77, 81, 82, 84, 87, 89, 103, 104, 110, 117, 125, 129, 130, 132, 133, 135, 136, 139, 142, 147, 150, 151, 152, 153, 154, 157, 185], "argument": [2, 46, 59, 77, 81, 82, 84, 86, 88, 105, 185], "type": [2, 13, 22, 28, 42, 46, 49, 70, 71, 73, 79, 80, 86, 87, 88, 89, 101, 104, 105, 106, 107, 110, 124, 127, 142, 145, 177], "shape": [2, 41, 42, 73, 79, 80, 84, 104, 105, 107, 108, 109, 110, 132, 139, 141, 145, 157, 162], "those": [2, 14, 27, 55, 79, 93, 98, 102, 110, 116, 133, 139, 145, 150, 152, 156, 163, 174, 181, 185], "problem": [2, 14, 17, 21, 28, 38, 39, 43, 48, 49, 55, 59, 63, 72, 73, 78, 83, 84, 86, 88, 90, 92, 94, 97, 98, 99, 101, 105, 106, 107, 115, 125, 130, 
132, 133, 136, 138, 139, 140, 141, 142, 145, 150, 152, 153, 156, 157, 158, 159, 161, 162, 170, 171, 177, 185], "where": [2, 5, 28, 39, 41, 42, 43, 59, 68, 72, 76, 81, 86, 88, 92, 94, 97, 99, 101, 103, 107, 108, 113, 119, 122, 125, 130, 132, 133, 136, 137, 140, 141, 145, 150, 152, 154, 156, 157, 161, 179, 185], "goal": [2, 24, 35, 73, 76, 77, 78, 82, 83, 86, 87, 88, 89, 90, 94, 117, 118, 119, 125, 145, 148, 149, 155, 156, 185], "take": [2, 15, 28, 46, 73, 76, 79, 80, 81, 85, 89, 91, 93, 94, 96, 98, 100, 101, 104, 105, 106, 107, 110, 128, 134, 140, 142, 150, 153, 154, 157, 165, 177, 179], "finit": [2, 63, 68, 73, 84], "valu": [2, 12, 14, 17, 23, 27, 28, 35, 36, 39, 41, 43, 46, 48, 59, 63, 65, 68, 72, 73, 77, 79, 82, 84, 85, 86, 88, 91, 94, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 125, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 145, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 160, 161, 162, 163, 164, 173, 174, 177, 179, 181, 182, 185], "exampl": [2, 5, 10, 13, 22, 33, 39, 48, 57, 70, 71, 73, 76, 78, 81, 83, 84, 85, 90, 91, 94, 99, 100, 102, 103, 105, 109, 110, 116, 119, 125, 133, 134, 136, 139, 140, 145, 147, 150, 151, 161, 163, 165, 171, 182, 183], "ar": [2, 12, 13, 14, 15, 16, 17, 21, 22, 24, 26, 27, 28, 32, 33, 35, 36, 38, 39, 42, 43, 46, 48, 52, 55, 57, 59, 65, 68, 70, 71, 72, 73, 74, 75, 76, 77, 79, 80, 81, 82, 84, 85, 88, 89, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 115, 116, 117, 118, 119, 120, 123, 125, 126, 127, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 145, 147, 149, 150, 151, 152, 153, 154, 155, 156, 157, 161, 162, 163, 164, 170, 171, 173, 177, 179, 181, 182, 183, 185], "iri": [2, 99], "setosa": 2, "versicolor": 2, "virginica": 2, "from": [2, 5, 13, 17, 23, 24, 28, 35, 36, 43, 46, 48, 55, 57, 59, 73, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172, 174, 177, 179, 181, 182, 185], "petal": 2, "sepal": 2, "measur": [2, 5, 17, 28, 65, 72, 74, 75, 76, 79, 80, 105, 108, 122, 125, 128, 134, 140, 142, 152, 157, 158], "whether": [2, 24, 63, 72, 73, 76, 81, 85, 87, 89, 90, 92, 97, 101, 102, 103, 106, 108, 142, 147, 150, 152, 156, 160, 161, 164], "patient": [2, 5, 24, 129, 135], "ha": [2, 24, 36, 42, 43, 46, 63, 70, 72, 73, 79, 80, 81, 83, 84, 85, 89, 94, 98, 101, 102, 104, 107, 108, 109, 110, 116, 117, 122, 129, 130, 132, 133, 135, 136, 137, 138, 140, 142, 143, 145, 146, 149, 151, 154, 156, 157, 163, 177, 179, 185], "particular": [2, 5, 26, 70, 73, 76, 77, 79, 80, 82, 85, 89, 91, 96, 97, 101, 103, 119, 132, 137, 141, 142, 145, 150, 152, 153, 156, 163], "diseas": [2, 5, 24, 73, 129, 135], "result": [2, 28, 65, 73, 76, 78, 79, 80, 81, 83, 84, 85, 88, 89, 91, 93, 94, 96, 98, 99, 100, 101, 102, 103, 110, 113, 114, 115, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 132, 133, 135, 136, 137, 138, 139, 142, 143, 145, 146, 150, 151, 152, 154, 156, 157, 163, 165, 178, 179, 181, 185], "medic": [2, 5, 24, 73], "email": 2, "spam": 2, "content": [2, 79, 85, 104, 147, 150], "sender": 2, "titl": [2, 91, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 
107, 108, 109, 110, 115, 122, 123, 125, 126, 131, 132, 133, 135, 137, 138, 139, 140, 141, 142, 146, 153, 157, 161, 162, 163, 164], "etc": [2, 24, 28, 35, 73, 84, 96, 105, 118, 129, 135], "when": [2, 5, 15, 16, 17, 21, 22, 26, 27, 28, 34, 39, 43, 52, 57, 59, 65, 72, 73, 76, 79, 80, 81, 84, 85, 86, 88, 92, 94, 95, 96, 97, 98, 99, 100, 101, 104, 105, 106, 108, 109, 113, 115, 116, 117, 119, 122, 123, 124, 125, 126, 127, 129, 132, 133, 135, 136, 137, 138, 140, 141, 142, 147, 150, 151, 152, 154, 157, 158, 174, 179, 182, 183], "two": [2, 12, 13, 28, 36, 46, 63, 65, 72, 73, 74, 75, 79, 80, 81, 84, 85, 94, 96, 98, 99, 101, 102, 103, 104, 106, 108, 115, 117, 118, 125, 126, 127, 128, 132, 133, 134, 136, 137, 138, 139, 141, 142, 148, 150, 152, 153, 154, 155, 156, 157, 158, 160, 162, 163, 164, 177, 185], "call": [2, 12, 13, 28, 43, 57, 59, 65, 68, 72, 73, 79, 80, 81, 84, 85, 86, 88, 91, 92, 95, 97, 98, 101, 102, 106, 109, 115, 116, 117, 118, 119, 127, 129, 132, 133, 135, 138, 139, 140, 141, 142, 143, 145, 146, 147, 148, 150, 152, 154, 155, 182], "binari": [2, 5, 41, 59, 73, 90, 98, 141, 142, 172, 185], "abov": [2, 5, 17, 28, 46, 72, 73, 77, 81, 82, 90, 96, 99, 101, 102, 103, 107, 108, 110, 115, 116, 128, 130, 133, 134, 135, 136, 140, 141, 142, 143, 145, 146, 150, 152, 156, 158, 160, 162, 163, 164, 177, 179, 185], "least": [2, 99, 117, 137, 161, 185], "three": [2, 76, 85, 98, 99, 102, 107, 108, 109, 112, 121, 132, 157, 158], "multi": [2, 103, 125, 129, 135, 141, 153], "below": [2, 17, 46, 59, 76, 83, 84, 90, 107, 108, 116, 122, 128, 134, 140, 142, 145, 154, 158, 160, 163, 164, 177, 179, 181, 185], "illustr": [2, 32, 57, 76, 80, 81, 84, 93, 96, 98, 99, 101, 108, 119, 132, 139, 142, 152, 157, 158, 161, 162], "provid": [2, 36, 72, 79, 81, 84, 85, 101, 103, 105, 107, 111, 114, 115, 116, 120, 123, 127, 131, 133, 136, 137, 138, 140, 141, 142, 143, 146, 147, 150, 152, 185], "user": [2, 5, 76, 85, 90, 102, 117, 118, 119, 132, 133, 135, 141, 145, 150, 154, 182, 185], "contain": [2, 17, 28, 42, 46, 59, 63, 65, 72, 73, 76, 79, 80, 84, 85, 86, 88, 94, 99, 103, 104, 105, 106, 107, 112, 119, 121, 124, 126, 127, 133, 139, 140, 141, 142, 150, 154, 160, 162, 164, 177, 185], "2": [2, 3, 17, 27, 28, 36, 41, 42, 46, 48, 65, 70, 73, 75, 76, 79, 80, 81, 82, 84, 85, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 113, 115, 116, 117, 118, 119, 120, 122, 123, 125, 126, 127, 129, 130, 132, 133, 134, 135, 136, 139, 140, 141, 142, 144, 145, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 161, 163, 165, 177, 185], "repres": [2, 5, 63, 65, 68, 72, 73, 79, 80, 81, 84, 85, 99, 103, 104, 106, 108, 115, 116, 128, 133, 134, 139, 140, 141, 142, 145, 157, 163], "x": [2, 23, 27, 28, 41, 42, 65, 73, 79, 80, 81, 103, 105, 107, 108, 109, 110, 115, 121, 128, 129, 131, 132, 134, 135, 137, 138, 139, 140, 141, 149, 150, 153, 156, 157, 158, 161, 162, 163, 164, 179], "y": [2, 27, 28, 41, 65, 73, 79, 80, 81, 99, 107, 108, 109, 110, 115, 121, 128, 131, 132, 134, 137, 138, 139, 140, 141, 145, 153, 157, 158, 161, 162, 163, 164, 179], "axi": [2, 17, 28, 75, 91, 98, 99, 103, 104, 107, 108, 110, 123, 125, 126, 132, 133, 137, 139, 140, 142, 145, 150, 153, 154, 156, 157, 161, 179], "becaus": [2, 5, 17, 72, 73, 76, 77, 80, 81, 82, 84, 85, 86, 88, 98, 99, 100, 101, 102, 108, 110, 119, 122, 125, 132, 133, 139, 143, 145, 146, 147, 151, 152, 154, 156, 158, 162], "onli": [2, 5, 17, 28, 36, 41, 46, 48, 63, 65, 68, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 92, 93, 95, 96, 97, 98, 
99, 101, 103, 105, 106, 109, 115, 117, 119, 125, 127, 129, 130, 131, 132, 133, 135, 136, 137, 140, 141, 142, 143, 145, 146, 150, 151, 152, 156, 157, 161, 174, 177, 181, 183], "here": [2, 28, 59, 73, 74, 76, 77, 78, 79, 80, 81, 84, 85, 86, 87, 88, 91, 92, 93, 97, 100, 102, 103, 105, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 124, 126, 127, 128, 129, 130, 131, 133, 134, 136, 137, 139, 140, 141, 142, 143, 144, 148, 149, 150, 151, 152, 155, 158, 159, 160, 162], "encod": [2, 5, 28, 46, 67, 68, 79, 85, 86, 88, 103, 104, 105, 107, 116, 119, 130, 132, 133, 136, 141, 165, 177], "color": [2, 73, 96, 103, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 134, 136, 137, 138, 139, 140, 141, 142, 146, 153, 156, 157, 161, 162, 163, 164, 179], "blue": [2, 73, 76, 94, 101, 103, 109, 110, 131, 132, 136, 137, 141, 142, 152, 157, 161, 163], "orang": [2, 73, 110, 115, 142, 157, 162, 163], "point": [2, 5, 14, 73, 77, 79, 81, 82, 89, 94, 99, 105, 106, 107, 108, 110, 116, 117, 119, 122, 132, 133, 137, 139, 141, 142, 145, 152, 154, 157, 161, 163], "thu": [2, 5, 28, 36, 60, 79, 81, 85, 92, 94, 96, 97, 100, 101, 102, 103, 105, 106, 107, 108, 109, 113, 116, 117, 118, 119, 122, 124, 127, 132, 133, 140, 141, 145, 150, 151, 152, 153, 156, 157, 161, 162, 177], "each": [2, 5, 17, 24, 28, 35, 39, 46, 48, 65, 68, 73, 74, 75, 76, 79, 80, 81, 84, 85, 93, 94, 96, 98, 99, 100, 101, 104, 105, 106, 107, 108, 109, 110, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 125, 126, 128, 132, 133, 134, 139, 140, 141, 142, 148, 150, 151, 152, 154, 155, 156, 157, 158, 160, 161, 162, 163, 164, 173, 177, 179, 185], "entir": [2, 34, 76, 84, 85, 106, 107, 117, 122, 124, 127, 137, 177], "wa": [2, 73, 80, 81, 89, 90, 94, 96, 97, 99, 101, 104, 107, 109, 110, 115, 119, 125, 128, 133, 134, 137, 139, 142, 152, 157, 162, 163], "linear": [2, 5, 12, 14, 15, 28, 36, 38, 39, 41, 42, 43, 46, 52, 57, 73, 79, 81, 84, 85, 86, 87, 88, 89, 92, 94, 97, 103, 107, 110, 118, 126, 128, 129, 130, 134, 135, 136, 144, 145, 147, 151, 157, 158, 160, 162, 164, 172, 174, 177, 182], "decis": [2, 5, 9, 12, 14, 15, 17, 26, 34, 41, 57, 81, 85, 87, 89, 91, 95, 100, 101, 102, 109, 110, 112, 113, 116, 118, 119, 120, 121, 122, 126, 132, 139, 141, 142, 143, 146, 158, 159, 160, 163, 164, 170, 171, 172, 173, 174, 175, 177], "rule": [2, 79, 80, 118, 132, 133, 136, 137, 141, 157, 173], "black": [2, 73, 79, 84, 91, 94, 96, 98, 101, 102, 104, 105, 106, 107, 109, 110, 115, 121, 123, 125, 126, 133, 134, 136, 138, 139, 140, 146, 150, 154, 161, 162, 164], "dot": 2, "line": [2, 28, 42, 68, 73, 79, 103, 108, 110, 115, 129, 132, 135, 137, 139, 141, 142, 143, 145, 146, 153, 154, 157, 161, 162, 179, 185], "new": [2, 5, 28, 35, 39, 46, 73, 77, 79, 80, 81, 82, 84, 85, 86, 88, 92, 93, 94, 95, 97, 98, 100, 101, 108, 109, 112, 113, 115, 117, 121, 122, 129, 135, 136, 139, 142, 148, 150, 151, 155, 160, 162, 163, 164], "accord": [2, 73, 137], "its": [2, 5, 17, 22, 28, 57, 60, 73, 78, 80, 81, 83, 84, 85, 92, 96, 97, 98, 100, 101, 102, 103, 108, 111, 115, 117, 119, 120, 125, 130, 131, 132, 133, 136, 137, 138, 140, 141, 142, 143, 145, 146, 149, 156, 157, 179], "posit": [2, 5, 26, 27, 28, 43, 46, 65, 102, 103, 108, 110, 118, 131, 133, 137, 140, 141, 142, 143, 145, 146, 154, 185], "respect": [2, 28, 41, 42, 84, 85, 89, 92, 97, 98, 100, 102, 117, 119, 132, 133, 135, 140, 141, 145, 147], "ly": [2, 133], "left": [2, 72, 73, 80, 91, 94, 98, 99, 100, 105, 107, 108, 109, 110, 115, 117, 118, 121, 127, 131, 134, 136, 137, 142, 152, 153, 157, 161, 163], "while": [2, 17, 28, 79, 80, 
81, 84, 97, 101, 102, 103, 105, 107, 108, 109, 110, 115, 116, 117, 119, 122, 125, 127, 129, 130, 132, 133, 135, 136, 137, 142, 145, 153, 156, 161, 177], "right": [2, 73, 74, 75, 79, 84, 86, 88, 98, 108, 117, 122, 124, 127, 137, 142, 152, 161], "defin": [2, 17, 28, 35, 46, 55, 65, 72, 76, 81, 84, 85, 86, 88, 90, 93, 94, 98, 99, 100, 108, 109, 114, 123, 128, 129, 131, 132, 133, 134, 135, 137, 139, 140, 142, 148, 149, 150, 153, 154, 155, 156, 157, 177, 179, 181, 182, 185], "higher": [2, 5, 26, 38, 42, 82, 83, 85, 96, 101, 104, 105, 108, 109, 118, 132, 133, 136, 137, 138, 147, 152, 179], "dimens": [2, 42, 73, 103, 132, 136, 156], "would": [2, 17, 28, 48, 73, 74, 75, 76, 78, 79, 80, 81, 83, 84, 86, 88, 96, 97, 98, 99, 100, 101, 104, 106, 107, 108, 110, 115, 116, 117, 119, 122, 125, 129, 132, 133, 135, 137, 138, 139, 140, 141, 142, 143, 145, 146, 149, 152, 156, 157, 161, 162], "hyperplan": 2, "howev": [2, 5, 21, 35, 79, 80, 81, 84, 85, 87, 89, 91, 94, 96, 98, 99, 101, 102, 104, 105, 106, 108, 109, 110, 113, 115, 116, 117, 118, 119, 122, 124, 125, 127, 132, 133, 134, 138, 139, 141, 142, 143, 145, 146, 147, 152, 154, 156, 157, 161, 162, 182], "depend": [2, 17, 21, 26, 27, 28, 36, 39, 76, 81, 84, 85, 96, 98, 100, 107, 108, 110, 117, 125, 129, 132, 133, 135, 137, 141, 142, 147, 150, 154, 157, 158], "A": [2, 5, 28, 35, 43, 52, 60, 63, 65, 73, 81, 85, 94, 96, 100, 101, 107, 116, 117, 133, 136, 138, 139, 142, 145, 156, 162, 172, 173, 177, 183], "These": [2, 5, 22, 73, 76, 80, 84, 104, 119, 142, 151, 162, 182], "handl": [2, 70, 71, 79, 84, 85, 89, 104, 115, 139, 150, 165], "discret": [2, 84, 101, 106, 116, 141, 145], "1": [2, 3, 17, 26, 28, 41, 42, 43, 46, 48, 59, 65, 73, 75, 76, 79, 80, 81, 82, 84, 85, 87, 88, 89, 90, 91, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 143, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 161, 162, 163, 165, 177, 179, 185], "cat": [2, 90], "dog": 2, "logisticregress": [2, 43, 72, 76, 78, 79, 81, 83, 84, 85, 86, 88, 90, 94, 98, 99, 127, 130, 131, 132, 136, 137, 141, 142, 147, 151, 157, 179, 181], "histgradientboostingclassifi": [2, 85, 87, 89, 116, 148, 150, 152, 153, 154, 155, 181], "note": [2, 7, 8, 35, 45, 46, 47, 49, 51, 53, 61, 66, 73, 76, 79, 80, 81, 84, 85, 89, 91, 92, 96, 97, 98, 101, 102, 106, 107, 108, 110, 118, 119, 137, 140, 141, 150, 151, 152, 156, 157, 162, 166, 177], "histor": 2, "reason": [2, 5, 17, 46, 57, 73, 75, 81, 87, 89, 96, 100, 101, 105, 108, 115, 117, 127, 131, 132, 137, 142, 156, 161], "confus": [2, 115, 151, 162, 182], "contrari": [2, 73, 84, 101, 108, 162], "what": [2, 5, 14, 23, 26, 27, 28, 34, 41, 42, 46, 48, 49, 57, 59, 72, 73, 74, 75, 77, 78, 79, 81, 82, 83, 85, 93, 96, 98, 99, 100, 101, 102, 108, 118, 119, 130, 131, 132, 135, 136, 137, 138, 140, 141, 142, 151, 152, 157, 162, 177, 179], "suggest": 2, "procedur": [2, 5, 17, 46, 76, 79, 96, 100, 101, 110, 117, 119, 122, 125, 126, 139, 152, 156, 172, 179, 181], "how": [2, 17, 21, 22, 25, 28, 32, 33, 38, 41, 46, 55, 57, 63, 68, 72, 73, 74, 75, 76, 78, 79, 80, 81, 83, 84, 85, 90, 91, 95, 97, 99, 100, 102, 103, 108, 109, 110, 115, 117, 119, 129, 131, 132, 133, 135, 137, 138, 142, 143, 146, 150, 151, 152, 153, 154, 156, 157, 159, 161, 162, 163, 170, 175, 179, 181, 182], "well": [2, 5, 12, 14, 21, 65, 75, 81, 94, 98, 99, 102, 106, 107, 108, 109, 110, 115, 117, 118, 126, 137, 142, 151, 154, 161], "idea": [2, 73, 
90, 97, 108, 115, 152], "behind": [2, 5, 12, 21, 118], "dataset": [2, 15, 17, 24, 28, 34, 36, 38, 39, 41, 42, 46, 52, 55, 59, 62, 63, 65, 68, 70, 72, 74, 75, 76, 77, 78, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 165, 177, 179, 185], "evalu": [2, 17, 21, 22, 25, 28, 36, 43, 46, 55, 59, 64, 65, 73, 78, 79, 80, 83, 86, 87, 88, 89, 90, 91, 92, 94, 96, 97, 99, 100, 101, 106, 108, 110, 111, 113, 115, 116, 120, 122, 126, 127, 130, 133, 136, 138, 143, 144, 145, 146, 147, 148, 149, 150, 151, 154, 155, 156, 162, 177, 178, 182, 185], "separ": [2, 28, 41, 43, 46, 63, 73, 75, 78, 79, 83, 85, 98, 104, 105, 106, 132, 136, 137, 141, 152, 157, 161, 163, 173, 177], "repeat": [2, 17, 28, 46, 57, 59, 76, 78, 83, 86, 88, 94, 95, 96, 99, 101, 125, 131, 137, 143, 146, 149, 156, 159, 160, 162, 163, 164, 185], "sever": [2, 12, 15, 22, 65, 76, 94, 96, 101, 108, 109, 110, 113, 115, 116, 118, 119, 122, 128, 132, 134, 137, 139, 154], "time": [2, 17, 28, 46, 65, 73, 76, 79, 81, 84, 85, 86, 87, 88, 89, 90, 94, 96, 99, 100, 101, 102, 105, 106, 108, 110, 113, 115, 116, 117, 118, 119, 122, 125, 130, 133, 136, 137, 141, 142, 150, 153, 154, 157, 159, 163, 165, 172, 177], "get": [2, 5, 17, 33, 41, 46, 52, 59, 65, 73, 74, 75, 76, 77, 79, 80, 81, 82, 85, 86, 87, 88, 89, 94, 95, 96, 99, 100, 101, 102, 105, 107, 109, 110, 113, 114, 115, 122, 123, 124, 125, 127, 130, 133, 136, 137, 139, 142, 143, 144, 145, 146, 147, 150, 152, 153, 154, 156, 157, 160, 161, 162, 164, 165, 179, 180, 181], "s": [2, 3, 5, 16, 22, 24, 25, 28, 34, 35, 41, 46, 72, 73, 76, 77, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 115, 117, 118, 120, 121, 125, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 145, 147, 150, 151, 152, 153, 154, 157, 158, 161, 162, 182, 185], "uncertainti": [2, 76, 85, 110, 124, 127, 152], "see": [2, 7, 8, 38, 45, 47, 49, 51, 53, 61, 66, 73, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 91, 94, 95, 97, 98, 99, 100, 103, 104, 105, 106, 107, 108, 109, 110, 115, 116, 118, 120, 122, 123, 125, 127, 132, 133, 135, 136, 138, 139, 140, 141, 142, 145, 147, 150, 151, 152, 153, 154, 156, 157, 158, 159, 161, 162, 163, 166], "more": [2, 17, 28, 35, 42, 43, 46, 52, 55, 59, 73, 76, 79, 80, 81, 84, 86, 88, 89, 91, 92, 95, 96, 97, 98, 102, 103, 104, 105, 107, 108, 109, 110, 115, 116, 117, 118, 119, 125, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 150, 151, 152, 153, 154, 156, 157, 158, 161, 162, 185], "detail": [2, 12, 17, 28, 38, 60, 73, 76, 77, 79, 80, 82, 84, 85, 86, 88, 92, 94, 97, 103, 105, 107, 108, 115, 117, 118, 132, 139, 141, 142, 143, 146, 147, 150, 158, 170, 171, 185], "n_sampl": [2, 110, 115, 125, 126, 132, 139, 141, 161], "row": [2, 17, 59, 63, 73, 84, 90, 101, 104, 107, 110, 129, 130, 133, 135, 136, 139, 150, 153, 154, 156, 185], "n_featur": [2, 42, 117, 119, 125, 126, 132, 139, 141], "column": [2, 5, 17, 28, 36, 41, 42, 46, 59, 63, 68, 72, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 92, 93, 97, 98, 100, 101, 103, 104, 105, 106, 107, 108, 110, 115, 117, 119, 120, 121, 123, 125, 126, 129, 130, 132, 133, 135, 136, 139, 140, 142, 143, 144, 145, 146, 147, 148, 150, 152, 
153, 154, 155, 156, 161, 162, 164, 177, 185], "equal": [2, 5, 28, 41, 43, 59, 81, 99, 115, 133, 150, 151, 162], "flower": 2, "4": [2, 3, 17, 28, 72, 73, 75, 79, 80, 81, 82, 83, 84, 87, 89, 90, 91, 94, 97, 98, 99, 101, 104, 105, 106, 107, 108, 110, 115, 116, 117, 118, 120, 123, 125, 129, 130, 131, 132, 133, 135, 136, 137, 139, 140, 142, 147, 150, 151, 152, 153, 154, 156, 158, 161, 163, 165, 177], "length": [2, 5, 17, 28, 74, 75, 79, 80, 104, 105, 109, 112, 121, 128, 129, 131, 134, 135, 137, 138, 140, 141, 142, 150, 154, 157, 158, 159, 160, 161, 162, 163, 164, 185], "width": [2, 116], "common": [2, 57, 76, 84, 100, 110, 133, 136, 145, 163, 185], "math": [2, 104], "matric": [2, 84], "capit": [2, 73, 76, 78, 79, 80, 81, 83, 84, 85, 103, 130, 136, 150, 151, 154], "letter": [2, 101, 107], "f": [2, 7, 8, 17, 28, 45, 47, 49, 51, 53, 61, 66, 73, 76, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 94, 96, 97, 99, 100, 101, 107, 108, 109, 110, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 131, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 145, 146, 147, 150, 151, 152, 154, 155, 157, 161, 163, 166, 179], "iter": [2, 28, 65, 76, 81, 84, 99, 100, 101, 113, 117, 122, 133, 136, 152, 153, 154], "optim": [2, 21, 22, 43, 63, 81, 92, 96, 97, 109, 114, 115, 116, 118, 123, 127, 129, 133, 135, 138, 142, 145, 147, 150, 151, 152, 154, 156, 161, 177, 183, 185], "befor": [2, 43, 46, 73, 77, 79, 81, 82, 84, 99, 101, 103, 104, 108, 116, 119, 124, 127, 133, 140, 142, 145], "converg": [2, 65, 81, 84], "algorithm": [2, 12, 15, 17, 23, 34, 73, 80, 81, 84, 94, 108, 109, 113, 115, 116, 117, 119, 122, 124, 125, 127, 129, 135, 139, 141, 157, 158], "avoid": [2, 5, 39, 46, 72, 73, 96, 108, 109, 110, 113, 121, 122, 127, 129, 130, 135, 136, 137, 138, 139, 150, 153, 154, 156], "over": [2, 14, 23, 28, 38, 39, 73, 92, 94, 96, 97, 100, 102, 108, 112, 116, 118, 121, 145, 148, 150, 155, 161, 170, 173], "done": [2, 5, 72, 80, 88, 116, 117, 119, 127, 129, 132, 133, 135, 151, 152, 154, 157, 161], "monitor": [2, 5, 28], "score": [2, 17, 26, 27, 28, 36, 46, 57, 59, 65, 72, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 92, 93, 94, 95, 96, 97, 98, 101, 102, 107, 108, 113, 114, 115, 116, 117, 118, 120, 122, 123, 124, 125, 126, 127, 130, 131, 133, 135, 136, 137, 139, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 157, 163, 175, 177, 179, 185], "jargon": [2, 80, 81], "onc": [2, 5, 17, 28, 79, 81, 89, 94, 99, 101, 103, 110, 117, 119, 141, 142, 144, 147, 148, 149, 150, 153, 154, 155, 156, 179], "quantiti": [2, 5, 84, 85, 105, 108], "size": [2, 48, 55, 57, 58, 84, 91, 92, 97, 105, 107, 109, 110, 117, 127, 132, 157, 165], "weight": [2, 28, 39, 41, 43, 46, 79, 92, 94, 97, 108, 109, 115, 128, 129, 133, 134, 135, 138, 139, 140, 141, 157, 172], "dure": [2, 12, 21, 24, 28, 32, 33, 38, 39, 55, 57, 71, 72, 73, 79, 84, 86, 88, 99, 101, 105, 109, 118, 125, 133, 138, 143, 146, 150, 152, 154, 157, 160, 162, 164, 170, 171, 182, 183], "collect": [2, 5, 28, 73, 76, 93, 98, 101, 103, 118, 125, 147], "four": [2, 105, 106, 142], "never": [2, 17, 55, 73, 79, 80, 84, 109, 110, 136, 137, 145, 150, 154, 163, 179], "seen": [2, 24, 76, 78, 80, 81, 83, 84, 101, 130, 136, 137, 140, 142, 150, 151, 152, 160, 164], "aspect": [2, 5, 21, 33, 76, 81, 95, 126, 133, 172], "configur": [2, 81, 114, 123, 139, 150], "learnt": [2, 57, 95, 108, 139, 157], "k": [2, 24, 59, 76, 79, 80, 81, 91, 95, 101, 102, 109, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 125, 126, 127, 145, 147, 149, 150, 152, 153, 156, 163, 182], 
"nearest": [2, 59, 79, 80, 81, 102, 182, 185], "neighbor": [2, 59, 77, 79, 80, 81, 82, 102, 149, 156, 182, 185], "approach": [2, 5, 13, 22, 33, 39, 46, 57, 71, 72, 84, 99, 103, 116, 132, 138, 141, 150, 152, 154, 156, 171, 183], "polynomi": [2, 42, 52, 102, 110, 132, 139], "sai": [2, 59, 108, 131, 137, 157], "degre": [2, 42, 46, 52, 102, 108, 110, 129, 130, 132, 133, 135, 136, 139], "between": [2, 5, 12, 14, 17, 21, 28, 39, 46, 52, 55, 57, 59, 73, 75, 76, 79, 81, 84, 92, 96, 97, 100, 102, 103, 105, 106, 108, 110, 112, 115, 117, 121, 124, 125, 127, 129, 131, 132, 133, 134, 135, 137, 139, 140, 141, 142, 145, 153, 154, 156, 158, 161, 162, 170, 185], "10": [2, 3, 17, 24, 35, 41, 46, 59, 72, 73, 76, 77, 79, 82, 84, 89, 91, 93, 94, 95, 96, 97, 98, 101, 102, 104, 105, 106, 107, 108, 110, 113, 116, 117, 118, 120, 122, 127, 129, 130, 131, 132, 133, 135, 136, 137, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 161, 163, 177, 179, 185], "impact": [2, 17, 43, 46, 55, 84, 86, 87, 88, 89, 95, 96, 102, 103, 113, 117, 122, 130, 133, 136, 145, 153, 156, 161, 179, 181, 182, 183, 185], "comput": [2, 5, 28, 46, 59, 65, 73, 76, 77, 80, 81, 82, 84, 85, 87, 89, 91, 92, 93, 95, 96, 97, 98, 99, 101, 102, 108, 109, 110, 113, 115, 116, 117, 118, 122, 124, 125, 127, 129, 130, 132, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 149, 152, 153, 154, 156, 157, 159, 162, 163, 179], "inde": [2, 17, 28, 73, 79, 80, 81, 85, 89, 92, 94, 96, 97, 100, 101, 103, 104, 105, 106, 107, 108, 109, 110, 115, 116, 117, 118, 124, 125, 126, 127, 131, 132, 133, 135, 137, 138, 139, 141, 142, 143, 145, 146, 147, 151, 152, 154, 157, 158, 161, 162, 163], "usual": [2, 16, 79, 81, 85, 94, 100, 102, 105, 129, 135, 142, 153, 154], "inspect": [2, 5, 28, 59, 63, 81, 90, 101, 103, 109, 114, 117, 123, 126, 130, 131, 132, 136, 137, 139, 141, 150, 153, 154, 157, 159, 161, 163, 185], "regard": [2, 12, 13, 21, 23, 32, 33, 38, 41, 70, 72, 81, 84, 86, 88, 91, 92, 94, 95, 97, 99, 102, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 127, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 155, 157, 159, 160, 161, 162, 163, 164], "tune": [2, 12, 16, 25, 34, 39, 42, 43, 46, 92, 96, 97, 102, 103, 111, 118, 119, 120, 125, 127, 131, 132, 137, 149, 153, 155, 156, 171, 177, 182, 185], "maxim": [2, 46, 94, 96, 133, 147, 148, 149, 150, 154, 155, 156, 157, 175, 182], "involv": [2, 5, 73, 98, 101, 151], "grid": [2, 96, 105, 114, 117, 118, 123, 133, 149, 152, 154, 156, 161, 165, 175, 177, 178, 179, 183, 185], "search": [2, 46, 95, 96, 114, 116, 117, 118, 119, 120, 123, 133, 138, 148, 149, 152, 155, 156, 161, 165, 175, 177, 178, 179, 182, 183, 185], "random": [2, 10, 12, 13, 14, 16, 17, 36, 46, 50, 72, 79, 94, 98, 99, 100, 101, 105, 107, 108, 110, 112, 113, 115, 116, 118, 121, 122, 124, 125, 126, 127, 132, 139, 149, 150, 153, 156, 165, 173, 178, 179, 183], "further": [2, 38, 73, 94, 95, 132, 139, 140, 152, 161, 163], "read": [2, 5, 73, 94, 105, 108, 145, 185], "post": [2, 48, 68, 171], "machin": [2, 22, 28, 33, 35, 39, 55, 57, 63, 68, 71, 73, 79, 80, 81, 83, 84, 85, 86, 88, 90, 92, 93, 96, 97, 98, 99, 100, 101, 110, 119, 124, 125, 126, 127, 132, 133, 136, 138, 139, 141, 142, 145, 150, 154], "mooc": [2, 73, 80, 90, 91, 92, 95, 97, 101, 102, 103, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164], "refer": [2, 13, 22, 33, 39, 57, 71, 72, 73, 
80, 81, 86, 88, 91, 92, 93, 94, 95, 97, 98, 101, 102, 107, 109, 110, 111, 112, 113, 115, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 151, 157, 159, 160, 161, 162, 163, 164, 171, 183], "process": [2, 5, 35, 57, 65, 71, 72, 73, 85, 100, 110, 116, 124, 125, 127, 133, 147, 151, 152, 182], "make": [2, 5, 23, 24, 28, 43, 46, 50, 55, 59, 60, 70, 72, 73, 76, 77, 79, 81, 82, 84, 85, 86, 88, 89, 90, 91, 92, 94, 96, 97, 98, 100, 101, 102, 104, 105, 107, 108, 109, 112, 113, 114, 115, 116, 117, 118, 119, 121, 122, 123, 124, 125, 126, 127, 129, 130, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 144, 145, 147, 148, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172], "appli": [2, 5, 14, 15, 16, 17, 23, 25, 26, 28, 38, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 81, 84, 85, 90, 99, 102, 104, 105, 107, 132, 140, 147, 150, 151, 153, 156, 161, 163, 172, 173, 175, 179, 181, 185], "unlabel": 2, "word": [2, 28, 138, 160, 164, 179], "equival": [2, 17, 28, 46, 72, 73, 80, 92, 97, 101, 117, 131, 137, 139, 141, 142, 156, 177], "unseen": [2, 65, 73, 99, 100, 117, 157], "notion": 2, "out": [2, 5, 24, 72, 73, 76, 79, 80, 84, 93, 95, 98, 100, 101, 108, 110, 113, 114, 117, 118, 122, 123, 127, 131, 133, 136, 137, 142, 150, 152, 159, 160, 163, 164], "ti": 2, "definit": [2, 139, 151], "distribut": [2, 5, 42, 73, 74, 75, 81, 93, 94, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 116, 132, 133, 141, 145, 147, 149, 152, 154, 156, 158, 162, 179], "condit": [2, 5, 108, 133, 141], "check": [2, 5, 28, 46, 59, 73, 76, 79, 80, 81, 83, 84, 85, 92, 94, 95, 97, 99, 100, 101, 102, 103, 105, 107, 109, 110, 111, 112, 114, 115, 116, 118, 119, 120, 121, 123, 124, 125, 126, 127, 129, 133, 135, 138, 139, 140, 142, 143, 146, 150, 152, 153, 157, 158, 160, 161, 162, 164, 170, 177], "wikipedia": [2, 57, 110], "articl": [2, 5, 57, 110], "finish": [2, 133], "_": [2, 73, 75, 80, 81, 85, 91, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 120, 121, 122, 123, 125, 126, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 151, 153, 157, 158, 161, 162, 163, 164], "end": [2, 81, 84, 91, 92, 94, 95, 97, 98, 99, 101, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 155, 157, 159, 160, 161, 162, 163, 164], "thei": [2, 5, 28, 46, 55, 65, 73, 77, 79, 81, 82, 84, 85, 89, 98, 99, 102, 104, 107, 108, 110, 116, 117, 119, 132, 133, 139, 142, 145, 151, 157, 162, 164, 171, 177, 182], "avail": [2, 5, 28, 35, 48, 72, 73, 74, 75, 76, 80, 90, 94, 95, 104, 106, 107, 112, 121, 125, 127, 133, 138, 140, 142, 151, 154, 177], "after": [2, 17, 27, 46, 76, 84, 101, 104, 110, 113, 116, 117, 120, 122, 138, 151, 155, 156], "been": [2, 73, 76, 79, 84, 91, 99, 101, 108, 109, 116, 122, 125, 135, 141, 151, 152, 157], "slope": [2, 28, 105, 140], "intercept": [2, 39, 128, 129, 130, 134, 135, 136, 137, 138, 139, 140, 141, 157], "one": [2, 5, 13, 17, 24, 36, 41, 42, 46, 57, 65, 68, 72, 73, 75, 76, 78, 79, 80, 81, 83, 84, 85, 87, 89, 92, 93, 94, 96, 97, 98, 99, 101, 103, 105, 107, 108, 109, 115, 116, 117, 119, 126, 130, 132, 133, 135, 136, 138, 139, 141, 142, 143, 145, 146, 150, 151, 152, 156, 157, 158, 161, 163, 173, 185], "section": [2, 76, 79, 84, 85, 91, 92, 94, 95, 97, 101, 102, 109, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 154, 157, 159, 160, 
161, 162, 163, 164], "about": [2, 12, 16, 21, 23, 28, 32, 35, 38, 55, 59, 63, 70, 71, 73, 76, 80, 81, 84, 93, 95, 98, 101, 102, 104, 106, 108, 115, 118, 131, 132, 137, 141, 145, 150, 158, 162, 170, 179, 182, 185], "also": [2, 12, 21, 24, 26, 28, 32, 35, 43, 57, 59, 73, 79, 80, 81, 84, 85, 86, 87, 88, 89, 92, 95, 96, 97, 100, 101, 102, 103, 104, 105, 108, 109, 110, 113, 114, 116, 117, 119, 122, 123, 125, 130, 132, 133, 136, 137, 139, 140, 141, 142, 145, 151, 152, 153, 154, 156, 157, 163], "python": [2, 5, 28, 35, 65, 70, 73, 76, 86, 88, 101, 110, 116, 133, 143, 146], "pass": [2, 17, 28, 46, 59, 65, 72, 76, 84, 85, 86, 88, 92, 96, 97, 101, 110, 140, 143, 144, 146, 147, 149, 150, 152, 156, 157, 163, 185], "anoth": [2, 5, 17, 57, 72, 73, 76, 96, 98, 100, 110, 119, 125, 126, 128, 133, 134, 139, 142, 145, 161], "includ": [2, 28, 33, 35, 63, 84, 96, 104, 125, 126, 129, 130, 133, 135, 136, 137, 139, 151, 174], "gridsearchcv": [2, 5, 96, 118, 123, 149, 150, 152, 154, 156, 161, 177, 179, 185], "someth": [2, 89, 151], "occur": [2, 84, 133], "your": [2, 5, 33, 38, 59, 72, 73, 74, 77, 78, 82, 84, 85, 86, 87, 88, 89, 92, 93, 97, 99, 111, 112, 113, 114, 117, 124, 127, 128, 129, 130, 131, 135, 136, 143, 144, 148, 149, 155, 159, 160, 164, 177, 179], "stick": 2, "too": [2, 5, 60, 80, 90, 96, 101, 102, 113, 116, 117, 119, 122, 132, 133, 135, 137, 150, 156, 161, 179], "so": [2, 5, 13, 28, 46, 65, 73, 75, 81, 83, 84, 86, 88, 91, 92, 97, 98, 99, 100, 101, 104, 105, 108, 109, 114, 115, 117, 123, 129, 132, 133, 135, 137, 138, 139, 142, 143, 146, 151, 152, 156, 163, 177], "up": [2, 5, 9, 34, 43, 73, 76, 79, 84, 94, 96, 98, 101, 103, 107, 108, 125, 128, 134, 139, 140, 142, 150, 162, 165, 182], "nois": [2, 46, 52, 57, 102, 110, 115, 132, 137, 139], "rather": [2, 28, 33, 76, 79, 105, 106, 141, 142, 147, 154, 156], "than": [2, 5, 17, 26, 27, 28, 33, 35, 42, 46, 52, 59, 72, 73, 76, 79, 80, 81, 82, 83, 84, 85, 88, 89, 91, 93, 94, 96, 97, 98, 100, 101, 102, 104, 105, 108, 109, 110, 116, 117, 118, 119, 122, 124, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 141, 142, 145, 147, 149, 150, 152, 153, 154, 156, 157, 160, 161, 164, 174, 177, 179, 185], "relev": [2, 59, 79, 94, 108, 142], "pattern": [2, 22, 73, 94, 124, 127, 143, 146, 152], "tell": [2, 73, 102, 108], "great": [2, 5, 48], "poorli": [2, 88], "real": [2, 48, 73, 79, 80, 84, 85, 99, 101, 128, 134, 142, 145, 162], "world": [2, 162], "fit_predict": 2, "kneighborsclassifi": [2, 59, 77, 80, 82, 185], "decisiontreeregressor": [2, 17, 91, 95, 100, 101, 102, 110, 111, 115, 118, 119, 120, 139, 161, 162, 164, 177], "One": [2, 48, 59, 68, 72, 76, 77, 82, 84, 99, 100, 106, 108, 116, 142, 145, 152], "focu": [2, 17, 73, 79, 101, 104, 107, 109, 115, 133, 142, 145, 150, 151, 157], "were": [2, 28, 65, 76, 79, 85, 94, 101, 102, 106, 115, 124, 127, 128, 134, 142, 154, 156, 161, 164], "If": [2, 5, 27, 28, 36, 41, 52, 65, 73, 76, 80, 84, 91, 92, 94, 95, 96, 97, 100, 101, 102, 107, 108, 109, 111, 112, 113, 116, 117, 118, 119, 120, 121, 122, 125, 128, 129, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 151, 152, 156, 157, 159, 160, 161, 162, 163, 164, 175, 185], "do": [2, 5, 16, 17, 28, 57, 65, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 88, 90, 93, 96, 98, 100, 101, 102, 104, 107, 113, 116, 117, 119, 122, 124, 125, 127, 129, 132, 133, 135, 138, 139, 142, 143, 146, 150, 151, 152, 153, 162, 179, 181, 185], "1d": [2, 28, 139], "5": [2, 3, 17, 28, 59, 65, 72, 73, 75, 76, 80, 81, 82, 83, 84, 85, 90, 91, 94, 95, 96, 97, 98, 99, 101, 102, 104, 
105, 106, 107, 108, 109, 110, 111, 113, 114, 115, 117, 118, 119, 120, 121, 122, 123, 126, 129, 131, 132, 133, 134, 135, 137, 138, 139, 140, 141, 142, 145, 147, 150, 152, 153, 154, 156, 157, 158, 161, 162, 163, 164, 165, 181, 182, 185], "someon": [2, 73], "come": [2, 5, 28, 55, 73, 79, 115, 118, 125, 128, 134, 140, 151, 152, 162], "doe": [2, 5, 17, 23, 28, 35, 46, 73, 76, 81, 84, 85, 87, 88, 89, 91, 94, 95, 96, 97, 99, 100, 105, 108, 117, 120, 131, 132, 133, 137, 141, 145, 147, 150, 154, 156, 157, 162, 179, 183], "15": [2, 3, 73, 79, 81, 84, 91, 102, 104, 114, 116, 117, 120, 123, 142, 154, 156, 157, 163, 177], "continu": [2, 5, 36, 41, 42, 63, 73, 79, 101, 103, 104, 108, 140, 141, 142, 145, 147, 158, 161], "price": [2, 48, 72, 90, 91, 101, 104, 107, 108, 144, 145, 147], "descript": [2, 28, 48, 72, 73, 77, 82, 91, 92, 95, 97, 101, 102, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164, 165], "room": [2, 48, 101, 107, 108], "surfac": [2, 28], "locat": [2, 74, 75, 91, 104, 105, 107, 141, 153], "ag": [2, 73, 76, 78, 79, 80, 81, 83, 84, 85, 101, 103, 107, 108, 130, 133, 136, 150, 151, 154], "mri": 2, "scan": [2, 5, 151], "want": [2, 17, 73, 74, 75, 79, 80, 84, 87, 89, 90, 91, 92, 94, 95, 97, 99, 100, 101, 102, 103, 105, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 125, 126, 128, 129, 131, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 150, 151, 152, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "tree": [2, 5, 9, 12, 14, 15, 16, 17, 57, 73, 81, 84, 85, 87, 89, 91, 95, 100, 101, 102, 108, 109, 110, 112, 113, 114, 116, 118, 119, 120, 121, 122, 123, 126, 132, 139, 143, 146, 148, 150, 154, 155, 158, 159, 160, 163, 164, 170, 171, 172, 173, 174, 175, 177], "piecewis": [2, 132, 162, 174], "constant": [2, 23, 83, 108, 129, 132, 135, 137, 139, 162, 174], "To": [2, 7, 8, 28, 36, 45, 46, 47, 49, 51, 53, 61, 66, 72, 73, 79, 80, 81, 85, 86, 88, 92, 94, 95, 96, 97, 99, 100, 101, 102, 108, 109, 110, 113, 116, 117, 122, 125, 129, 132, 133, 135, 137, 138, 139, 142, 145, 154, 156, 162, 166, 177], "given": [2, 5, 17, 28, 41, 48, 57, 65, 79, 81, 84, 93, 96, 98, 99, 101, 104, 106, 107, 108, 110, 115, 119, 126, 131, 132, 133, 135, 136, 137, 138, 140, 141, 142, 147, 152, 154, 163, 172, 179, 181, 182], "output": [2, 17, 41, 73, 76, 81, 84, 85, 100, 101, 105, 108, 110, 128, 129, 132, 134, 135, 141, 142, 147, 163], "correspond": [2, 17, 28, 46, 73, 76, 79, 84, 85, 86, 88, 94, 96, 101, 102, 104, 105, 106, 107, 108, 110, 112, 121, 135, 140, 141, 142, 150, 151, 153, 154, 162, 174, 177], "ridg": [2, 39, 43, 46, 107, 108, 110, 131, 133, 137], "order": [2, 5, 17, 28, 39, 46, 68, 73, 81, 85, 88, 89, 90, 94, 99, 101, 104, 114, 119, 123, 133, 140, 150, 153, 154, 185], "shrink": [2, 43, 133, 137], "constrain": [2, 39, 43, 60, 102, 132], "toward": [2, 43, 108, 125, 133, 137, 141], "zero": [2, 27, 28, 41, 42, 43, 52, 55, 73, 84, 102, 108, 133, 137, 139, 141], "2d": [2, 132, 139], "singl": [2, 14, 15, 17, 23, 24, 26, 27, 28, 34, 41, 42, 43, 46, 48, 52, 59, 63, 65, 68, 72, 73, 76, 79, 81, 84, 85, 90, 93, 96, 98, 99, 100, 101, 108, 110, 115, 118, 119, 120, 125, 126, 128, 129, 132, 134, 135, 139, 142, 143, 145, 146, 150, 151, 152, 157, 161, 163, 172, 173, 174, 175, 177, 179, 181, 185], "orient": [2, 108, 131, 137, 163], "clf": 2, "give": [2, 5, 13, 21, 23, 28, 32, 52, 55, 57, 70, 73, 76, 81, 83, 84, 85, 94, 95, 96, 99, 101, 105, 106, 108, 109, 110, 115, 116, 117, 118, 119, 125, 131, 
133, 137, 138, 139, 141, 142, 145, 152, 156, 157, 163], "concret": [2, 28, 57], "graphic": [2, 65, 85, 99, 107, 140], "plot": [2, 17, 28, 46, 59, 65, 73, 74, 75, 81, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 121, 123, 125, 126, 128, 131, 132, 133, 134, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 152, 153, 154, 156, 157, 158, 159, 160, 161, 162, 163, 164, 179], "compos": [2, 72, 73, 80, 84, 85, 86, 87, 88, 89, 90, 93, 98, 104, 119, 130, 136, 145, 148, 150, 152, 154, 155, 161], "sinc": [2, 73, 76, 81, 92, 96, 97, 98, 99, 101, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 125, 133, 136, 138, 141, 142, 145, 150, 157, 158, 162, 163], "potenti": [2, 43, 48, 73, 81, 85, 95, 101, 102, 116, 138, 142, 152, 181], "choic": [2, 28, 35, 43, 52, 55, 57, 84, 97, 98, 101, 105, 107, 108, 110, 133, 138, 147, 150, 154, 156, 165, 185], "circl": [2, 73, 107, 110, 132], "vs": [2, 81, 84, 131, 137, 142, 163], "squar": [2, 27, 57, 105, 133, 134, 138, 139, 145, 147], "boil": 2, "down": [2, 5, 117], "fact": [2, 17, 81, 83, 89, 103, 105, 115, 133, 154, 164, 177], "exactli": [2, 43, 55, 59, 79, 89, 94, 185], "know": [2, 5, 85, 92, 97, 101, 102, 104, 107, 110, 115, 127, 133, 139, 142, 150], "frame": [2, 104, 105, 106, 107], "scienc": [2, 5, 35, 94, 105], "solv": [2, 5, 28, 43, 59, 73, 79, 85, 94, 97, 99, 101, 105, 106, 133, 138, 139, 140, 141, 145, 162, 185], "might": [2, 5, 28, 36, 48, 72, 83, 84, 85, 87, 89, 94, 95, 99, 100, 101, 102, 105, 108, 117, 134, 142, 145, 150, 154, 163], "speci": [2, 17, 74, 75, 109, 131, 137, 141, 157, 158, 159, 161, 163, 185], "commonli": [2, 73, 79, 80], "denot": 2, "eventu": 2, "ideal": [2, 101, 142, 145], "let": [2, 5, 17, 24, 42, 46, 72, 73, 76, 77, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 110, 115, 117, 125, 126, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 142, 145, 147, 150, 151, 154, 157, 158, 161, 162, 185], "On": [2, 5, 28, 79, 81, 82, 85, 90, 96, 101, 108, 109, 110, 117, 122, 124, 127, 132, 133, 135, 136, 137, 138, 139, 142, 145, 150, 152, 154, 157, 162, 163, 164], "figur": [2, 26, 76, 79, 99, 101, 103, 108, 109, 110, 115, 141, 152, 153, 154, 157, 163, 179], "mathemat": [2, 57, 92, 97, 132, 139, 140, 141, 145], "b": [2, 14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 128, 134, 140, 154, 156, 172, 173, 174, 175, 177, 179, 181, 185], "creat": [2, 28, 43, 46, 59, 68, 70, 71, 72, 76, 77, 79, 81, 82, 84, 85, 92, 93, 95, 97, 98, 99, 102, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 118, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 141, 142, 143, 144, 146, 147, 150, 151, 152, 154, 157, 159, 160, 162, 163, 164, 173, 177, 182], "infin": 2, "vari": [2, 17, 79, 92, 95, 97, 99, 102, 105, 108, 123, 125, 133, 138, 140, 142, 151, 156, 177], "fulfil": 2, "requir": [2, 5, 12, 17, 21, 28, 32, 35, 38, 39, 42, 46, 55, 59, 70, 72, 73, 76, 79, 81, 85, 92, 97, 98, 105, 109, 116, 117, 132, 133, 147, 152, 154, 156, 161, 170, 177, 182, 185], "minim": [2, 5, 39, 57, 63, 91, 96, 101, 102, 137, 138, 145, 147, 157, 173], "sum": [2, 39, 41, 57, 73, 80, 82, 115, 134, 136, 141, 142], "error": [2, 5, 15, 27, 28, 39, 41, 50, 52, 55, 57, 58, 60, 73, 76, 80, 86, 88, 91, 94, 95, 96, 102, 109, 111, 112, 113, 115, 116, 117, 119, 120, 121, 122, 128, 129, 133, 134, 135, 138, 139, 142, 144, 145, 147, 148, 155, 165, 173], "red": [2, 26, 76, 101, 103, 109, 110, 
115, 131, 132, 136, 137, 141, 152, 161], "best": [2, 12, 17, 21, 32, 38, 52, 57, 59, 73, 90, 96, 97, 99, 102, 111, 113, 114, 116, 117, 119, 120, 122, 123, 128, 133, 134, 137, 138, 139, 142, 145, 148, 149, 150, 151, 152, 154, 155, 156, 157, 170, 179, 182, 183, 185], "possibl": [2, 5, 14, 28, 35, 41, 42, 57, 59, 63, 68, 73, 81, 84, 91, 95, 96, 99, 101, 102, 103, 108, 110, 116, 117, 119, 127, 128, 129, 132, 133, 134, 135, 137, 139, 141, 145, 147, 149, 150, 152, 153, 156, 159, 160, 162, 163, 164, 179, 185], "abstract": [2, 105], "manner": [2, 13, 28, 84, 94, 133, 147], "state": [2, 5, 13, 59, 73, 79, 80, 81, 84, 85, 107, 136, 150, 154], "jockei": 2, "wheel": 2, "i": [2, 12, 17, 19, 21, 28, 46, 70, 71, 72, 73, 76, 79, 80, 81, 83, 84, 85, 88, 89, 95, 96, 99, 101, 103, 108, 110, 115, 116, 117, 118, 128, 129, 132, 133, 134, 135, 138, 141, 142, 145, 147, 150, 156, 157, 165, 179, 185], "support": [2, 5, 81, 87, 89, 92, 97, 132, 136, 139, 143, 146, 163], "standardscal": [2, 28, 46, 59, 65, 72, 76, 81, 85, 87, 89, 90, 92, 97, 98, 99, 107, 108, 130, 131, 132, 136, 137, 141, 149, 151, 156, 177, 179, 181, 185], "columntransform": [2, 71, 85, 87, 89, 90, 132, 136, 148, 150, 152, 154, 155], "enough": [2, 5, 88, 89, 98, 101, 102, 115, 117, 137, 145, 151, 157, 161, 163, 177], "flexibl": [2, 5, 52, 55, 57, 60, 92, 97, 102, 119, 157], "opposit": 2, "cluster": [2, 101, 103, 172], "whose": [2, 80, 116, 151], "group": [2, 5, 19, 24, 28, 73, 99, 100, 101, 107, 130, 136, 165], "subset": [2, 16, 41, 43, 46, 70, 73, 76, 79, 80, 85, 90, 101, 105, 107, 117, 119, 122, 124, 125, 126, 127, 148, 155, 157, 158, 161, 177], "them": [2, 5, 12, 59, 73, 75, 76, 81, 84, 85, 93, 96, 98, 101, 108, 110, 112, 116, 117, 118, 121, 129, 132, 133, 135, 139, 141, 142, 145, 150, 151, 152, 156, 160, 163, 164, 177, 185], "broad": 2, "topic": [2, 101, 107], "custom": [2, 139], "commerc": 2, "websit": [2, 35, 48, 77, 82], "although": 2, "mention": [2, 48, 76, 92, 97, 100, 104, 109, 116, 118, 131, 132, 133, 137, 138, 141, 142, 143, 145, 146, 151, 154, 164], "cover": [2, 49, 73, 76, 79, 84, 85, 137], "impli": [2, 161], "fix": [2, 28, 46, 52, 57, 84, 92, 97, 117, 133, 149, 150, 156, 161, 163, 177, 179, 183], "like": [2, 5, 17, 24, 28, 43, 52, 73, 79, 80, 84, 85, 86, 88, 96, 98, 101, 104, 106, 108, 109, 110, 117, 125, 133, 134, 139, 140, 141, 142, 143, 146, 147, 151], "necessari": [2, 5, 43, 73, 117, 132, 152], "subdivid": [2, 157], "select": [2, 5, 12, 14, 15, 16, 17, 21, 22, 23, 24, 25, 26, 27, 28, 32, 33, 34, 38, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 74, 75, 76, 78, 79, 81, 83, 86, 87, 88, 89, 96, 99, 101, 103, 108, 110, 115, 117, 119, 123, 124, 127, 128, 130, 134, 136, 138, 139, 141, 150, 151, 152, 153, 156, 158, 170, 172, 173, 174, 175, 177, 179, 181, 185], "final": [2, 12, 22, 38, 43, 55, 59, 70, 80, 81, 84, 85, 96, 100, 101, 105, 107, 110, 114, 115, 117, 122, 123, 124, 126, 127, 133, 137, 143, 144, 146, 147, 150, 151, 152, 160, 164, 182], "sometim": [2, 5, 57, 85, 98, 142, 145, 152, 154], "clear": [2, 73, 97, 102, 161], "mani": [2, 5, 28, 41, 59, 72, 73, 74, 75, 80, 81, 84, 85, 96, 98, 99, 101, 102, 104, 107, 110, 117, 119, 129, 130, 132, 133, 135, 136, 142, 151, 156, 185], "need": [2, 5, 21, 23, 28, 36, 39, 59, 65, 73, 77, 79, 81, 82, 84, 85, 90, 92, 96, 97, 101, 102, 105, 108, 109, 110, 115, 117, 118, 119, 120, 130, 132, 133, 136, 137, 138, 139, 143, 144, 146, 147, 150, 151, 152, 154, 155, 161, 175, 177], "criteria": [2, 117], "ml": [2, 5, 94], "cheatsheet": 2, "readthedoc": 2, "io": [2, 35], "en": 2, "latest": [2, 27], 
"googl": 2, "develop": [2, 5, 35, 70, 73, 86, 88, 105, 129, 135, 139], "com": [2, 5, 35], "advanc": [2, 5, 35, 70, 87, 89], "relat": [2, 13, 17, 22, 33, 39, 55, 57, 71, 73, 85, 103, 137, 139, 142, 171, 182, 183], "terminolog": 2, "modifi": [3, 5, 116, 118, 139], "run": [3, 5, 17, 43, 65, 77, 82, 91, 95, 96, 108, 114, 123, 133, 153, 154, 177, 179, 185], "statu": [3, 24, 73, 79, 84, 85, 136, 150, 152, 154], "python_script": 3, "01_tabular_data_explor": 3, "2024": 3, "04": [3, 11, 29, 44, 67, 81, 109, 117, 133, 163, 165, 168], "26": [3, 91, 104, 105, 120], "13": [3, 73, 79, 81, 91, 94, 104, 105, 106, 117, 120, 132, 145, 156], "19": [3, 75, 84, 91, 104, 105, 116, 117, 120, 129, 133, 135, 138, 143, 146, 153, 156, 158], "cach": 3, "7": [3, 17, 46, 72, 73, 75, 81, 82, 84, 91, 94, 97, 98, 99, 101, 104, 105, 107, 108, 116, 117, 120, 129, 133, 135, 142, 147, 150, 153, 154, 156, 158, 161, 163, 165, 177, 185], "52": [3, 101, 104, 105, 107, 108, 120, 125, 130, 136, 157], "01_tabular_data_exploration_ex_01": 3, "76": [3, 83, 97, 104, 106, 142], "01_tabular_data_exploration_sol_01": 3, "20": [3, 17, 28, 59, 73, 75, 84, 85, 90, 91, 96, 101, 102, 103, 104, 105, 106, 107, 113, 117, 118, 120, 122, 129, 130, 133, 135, 136, 142, 147, 148, 149, 155, 156, 158], "82": [3, 78, 80, 83, 100, 116, 117], "02_numerical_pipeline_cross_valid": 3, "02_numerical_pipeline_ex_00": 3, "75": [3, 59, 79, 81, 84, 90, 91, 98, 100, 104, 107], "02_numerical_pipeline_ex_01": 3, "68": [3, 75, 90, 104, 146, 154, 156], "02_numerical_pipeline_hands_on": 3, "92": [3, 80, 84, 91, 145, 153, 185], "02_numerical_pipeline_introduct": 3, "77": [3, 104, 106, 116, 142], "02_numerical_pipeline_sc": 3, "23": [3, 84, 91, 101, 104, 105, 106, 107, 108, 116, 163], "02_numerical_pipeline_sol_00": 3, "21": [3, 84, 91, 101, 104, 105, 107, 108, 147, 154, 156], "02_numerical_pipeline_sol_01": 3, "57": [3, 104, 105, 120], "03_categorical_pipelin": 3, "95": [3, 100, 107, 154], "03_categorical_pipeline_column_transform": 3, "55": [3, 95, 100, 104, 105, 117], "03_categorical_pipeline_ex_01": 3, "71": [3, 104, 162], "03_categorical_pipeline_ex_02": 3, "97": [3, 153, 163, 185], "03_categorical_pipeline_sol_01": 3, "03_categorical_pipeline_sol_02": 3, "27": [3, 73, 84, 91, 104, 105, 120, 130, 136], "03_categorical_pipeline_visu": 3, "79": [3, 72, 104], "cross_validation_baselin": 3, "9": [3, 28, 72, 73, 81, 82, 90, 91, 94, 97, 98, 99, 101, 104, 107, 108, 116, 117, 118, 120, 133, 135, 143, 145, 146, 147, 150, 154, 156, 158, 185], "33": [3, 101, 104, 107, 117, 153], "cross_validation_ex_01": 3, "cross_validation_ex_02": 3, "31": [3, 104, 114, 117, 123, 135, 142, 154], "cross_validation_group": 3, "6": [3, 12, 21, 38, 46, 70, 72, 73, 75, 81, 82, 90, 91, 94, 97, 98, 99, 101, 104, 105, 107, 108, 115, 116, 117, 118, 120, 129, 131, 133, 135, 137, 142, 145, 146, 147, 150, 153, 154, 156, 157, 158, 161, 162, 165, 177], "07": [3, 100, 108, 133, 150], "cross_validation_learning_curv": 3, "12": [3, 73, 81, 90, 91, 100, 103, 104, 105, 106, 107, 115, 117, 120, 142, 150, 156, 163], "49": [3, 84, 104, 116, 117, 125, 138, 154], "cross_validation_nest": 3, "22": [3, 91, 101, 104, 105, 107, 108, 130, 136, 145, 147, 153], "48": [3, 79, 80, 81, 91, 104, 120, 153], "cross_validation_sol_01": 3, "cross_validation_sol_02": 3, "cross_validation_stratif": 3, "08": [3, 28, 105, 133], "cross_validation_tim": 3, "cross_validation_train_test": 3, "cross_validation_validation_curv": 3, "18": [3, 28, 73, 75, 79, 91, 104, 105, 117, 120, 129, 130, 135, 136, 139, 150, 151, 153, 154, 156, 158], 
"38": [3, 73, 79, 81, 84, 104, 118, 120, 130, 136, 150, 151, 154], "datasets_adult_censu": 3, "54": [3, 104, 105, 117, 120], "datasets_ames_h": 3, "29": [3, 91, 104, 108, 116, 118, 120, 125, 139, 154], "datasets_bike_rid": 3, "42": [3, 72, 79, 81, 83, 85, 104, 117, 120, 127, 132, 148, 149, 150, 152, 154, 155, 156], "datasets_blood_transfus": 3, "datasets_california_h": 3, "45": [3, 73, 80, 81, 84, 91, 101, 102, 104, 106, 116, 120, 134, 140, 141, 157], "dev_features_import": 3, "24": [3, 91, 101, 104, 105, 106, 107, 108, 142, 147, 154], "53": [3, 104, 105, 116, 125, 154], "ensemble_adaboost": 3, "ensemble_bag": 3, "ensemble_ex_01": 3, "66": [3, 90, 104, 105], "ensemble_ex_02": 3, "43": [3, 85, 91, 94, 101, 104, 105, 116, 118, 120], "ensemble_ex_03": 3, "ensemble_ex_04": 3, "ensemble_gradient_boost": 3, "25": [3, 59, 73, 79, 80, 81, 85, 91, 101, 102, 104, 105, 107, 108, 117, 120, 130, 134, 136, 140, 150, 151, 154], "63": [3, 104, 110, 154], "ensemble_hist_gradient_boost": 3, "41": [3, 80, 100, 101, 104, 107, 108, 117], "35": [3, 73, 104, 106, 117, 122, 136], "ensemble_hyperparamet": 3, "58": [3, 104, 105, 130, 135, 136, 150], "44": [3, 73, 79, 91, 101, 104, 120, 130, 135, 136, 150, 151, 154], "ensemble_introduct": 3, "28": [3, 73, 79, 81, 84, 91, 101, 104, 105, 106, 116, 120, 130, 136, 150, 151, 154], "ensemble_random_forest": 3, "ensemble_sol_01": 3, "64": [3, 94, 104, 105, 154], "ensemble_sol_02": 3, "ensemble_sol_03": 3, "62": [3, 90, 104, 118], "16": [3, 73, 81, 84, 89, 91, 94, 104, 106, 116, 117, 118, 120, 147, 153, 156, 157, 163], "ensemble_sol_04": 3, "30": [3, 73, 79, 84, 85, 91, 92, 94, 95, 97, 102, 104, 106, 107, 110, 116, 117, 118, 120, 130, 135, 136, 148, 150, 151, 152, 154, 155, 161, 163, 164], "40": [3, 28, 73, 75, 79, 80, 81, 85, 101, 104, 117, 129, 130, 134, 135, 136, 140, 149, 150, 151, 154, 156, 158], "feature_selection_ex_01": 3, "34": [3, 104, 107, 153, 154], "feature_selection_introduct": 3, "78": [3, 104, 142, 157], "feature_selection_limitation_model": 3, "feature_selection_sol_01": 3, "linear_models_ex_01": 3, "linear_models_ex_02": 3, "linear_models_ex_03": 3, "linear_models_ex_04": 3, "linear_models_feature_engineering_classif": 3, "06": 3, "linear_models_regular": 3, "linear_models_sol_01": 3, "32": [3, 85, 104, 117, 154], "01": [3, 10, 18, 31, 37, 56, 58, 62, 81, 96, 105, 117, 118, 130, 131, 136, 137, 150, 151, 153, 165, 167, 169, 180], "linear_models_sol_02": 3, "linear_models_sol_03": 3, "linear_models_sol_04": 3, "linear_regression_in_sklearn": 3, "linear_regression_non_linear_link": 3, "65": [3, 84, 90, 104, 118], "linear_regression_without_sklearn": 3, "logistic_regress": [3, 131, 132, 137, 141], "metrics_classif": 3, "metrics_ex_01": 3, "81": [3, 28, 72, 78, 83, 116, 117], "metrics_ex_02": 3, "metrics_regress": 3, "metrics_sol_01": 3, "metrics_sol_02": 3, "parameter_tuning_ex_02": 3, "72": [3, 104], "parameter_tuning_ex_03": 3, "8": [3, 59, 73, 75, 79, 82, 84, 90, 91, 94, 97, 98, 99, 100, 101, 103, 104, 107, 108, 110, 114, 115, 116, 117, 118, 120, 121, 123, 131, 132, 133, 136, 137, 139, 142, 147, 148, 150, 153, 154, 155, 156, 157, 162, 163, 177, 179, 185], "parameter_tuning_grid_search": 3, "parameter_tuning_manu": 3, "46": [3, 84, 91, 101, 104, 115, 116, 156], "parameter_tuning_nest": 3, "11": [3, 73, 84, 91, 104, 117, 118, 120, 133, 150, 156, 161, 177], "parameter_tuning_parallel_plot": 3, "74": [3, 104], "parameter_tuning_randomized_search": 3, "parameter_tuning_sol_02": 3, "89": [3, 81, 91, 125], "parameter_tuning_sol_03": 3, "trees_classif": 3, 
"trees_dataset": 3, "trees_ex_01": 3, "61": [3, 100, 104, 117, 133, 154], "trees_ex_02": 3, "trees_hyperparamet": 3, "trees_regress": 3, "17": [3, 28, 73, 75, 79, 81, 91, 101, 104, 116, 117, 120, 129, 135, 141, 145, 147, 153, 154, 156, 157, 158], "trees_sol_01": 3, "96": 3, "trees_sol_02": 3, "51": [3, 104, 185], "lot": [5, 73, 107, 108, 127, 133, 142], "materi": 5, "far": [5, 46, 91, 102, 115, 122, 126, 133], "congratul": 5, "And": [5, 100], "thank": [5, 132, 152], "everyon": 5, "instructor": 5, "staff": 5, "help": [5, 59, 65, 81, 83, 87, 89, 92, 93, 97, 98, 102, 104, 105, 106, 107, 108, 117, 124, 127, 131, 132, 133, 137, 141, 142, 145, 185], "forum": [5, 35], "student": [5, 35], "hard": [5, 72, 73, 74, 75, 124, 127, 141, 142, 147], "work": [5, 39, 59, 64, 73, 81, 84, 85, 99, 100, 105, 110, 117, 119, 126, 127, 130, 133, 136, 148, 151, 153, 155, 157, 159, 162, 163, 165, 170, 185], "summar": [5, 101, 109, 117, 119], "train": [5, 14, 15, 16, 17, 24, 25, 28, 33, 34, 36, 39, 41, 43, 46, 50, 52, 55, 57, 58, 59, 60, 65, 68, 70, 71, 73, 76, 77, 78, 81, 82, 83, 84, 85, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 103, 105, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 165, 174, 175, 177, 182, 185], "test": [5, 17, 25, 28, 33, 34, 36, 43, 46, 52, 55, 57, 58, 59, 65, 71, 72, 76, 77, 78, 81, 82, 83, 84, 85, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 102, 108, 110, 111, 112, 113, 114, 115, 117, 118, 120, 121, 122, 123, 124, 125, 126, 127, 130, 131, 133, 135, 136, 137, 141, 142, 143, 145, 146, 148, 150, 151, 152, 154, 155, 156, 159, 162, 163, 165, 175, 177, 179, 185], "built": [5, 36, 93, 98, 108, 110, 139, 142, 161, 163, 173], "matrix": [5, 28, 81, 84, 94, 103, 119, 124, 127, 133, 139], "featur": [5, 14, 15, 16, 17, 23, 28, 32, 33, 34, 36, 39, 41, 42, 46, 48, 57, 59, 63, 64, 65, 68, 70, 71, 72, 73, 74, 75, 76, 79, 80, 85, 86, 88, 90, 91, 93, 97, 98, 101, 103, 104, 105, 106, 107, 109, 110, 115, 116, 117, 119, 124, 127, 129, 130, 135, 136, 140, 141, 145, 150, 151, 152, 156, 157, 158, 159, 160, 161, 162, 163, 164, 172, 173, 177, 185], "observ": [5, 27, 43, 46, 50, 52, 73, 75, 81, 83, 84, 85, 87, 89, 91, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 105, 106, 109, 110, 113, 115, 117, 119, 122, 123, 125, 132, 133, 135, 136, 137, 139, 141, 142, 145, 150, 153, 156, 157, 158, 161, 162, 164, 174], "transform": [5, 28, 39, 42, 65, 68, 80, 81, 84, 85, 90, 93, 96, 98, 101, 102, 105, 110, 116, 119, 124, 127, 129, 130, 132, 133, 135, 136, 137, 139, 145, 150, 151, 152, 153, 154, 179], "often": [5, 39, 41, 43, 57, 60, 73, 84, 85, 96, 98, 110, 137, 139, 145, 147, 152, 177, 182, 185], "typic": [5, 12, 48, 70, 73, 84, 101, 107, 117, 124, 127, 132, 141, 145, 147, 152, 154, 172], "categor": [5, 36, 46, 63, 68, 70, 71, 72, 73, 74, 75, 86, 88, 103, 104, 119, 130, 132, 133, 136, 141, 142, 150, 158, 165, 177], "variabl": [5, 23, 28, 35, 39, 46, 48, 57, 59, 63, 65, 67, 68, 72, 76, 77, 80, 81, 82, 86, 88, 91, 94, 95, 100, 101, 102, 103, 104, 106, 107, 110, 117, 119, 130, 133, 136, 140, 141, 145, 150, 152, 154, 156, 158, 165, 177, 179, 185], "inform": [5, 23, 28, 46, 72, 73, 76, 80, 84, 85, 91, 94, 95, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 115, 117, 118, 125, 126, 133, 139, 141, 142, 143, 146, 150, 151, 154, 156, 158], "seek": [5, 33, 117, 118, 142], "suffic": [5, 117], "But": [5, 90, 97, 100, 101, 
107, 132, 133, 152, 153], "larg": [5, 17, 46, 68, 85, 96, 101, 105, 107, 113, 114, 116, 117, 122, 123, 126, 127, 131, 133, 135, 137, 145, 149, 152, 153, 154, 156, 179], "detect": 5, "underfit": [5, 12, 21, 28, 32, 38, 42, 43, 50, 52, 55, 57, 58, 59, 60, 95, 115, 117, 118, 119, 131, 132, 135, 137, 139, 157, 165, 170, 172], "multipl": [5, 84, 94, 108, 110, 115, 129, 130, 135, 136, 139, 143, 144, 146, 147, 153, 172, 173], "hyper": [5, 17, 36, 96, 98, 152, 156, 179], "control": [5, 43, 77, 81, 82, 92, 96, 97, 102, 117, 119, 125, 131, 132, 137, 139, 148, 149, 151, 153, 155, 156, 157, 161, 172, 182], "import": [5, 12, 17, 21, 28, 35, 36, 46, 57, 59, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 162, 163, 164, 165, 168, 170, 177, 179, 181, 185], "randomsearchcv": 5, "understand": [5, 12, 21, 32, 38, 55, 77, 79, 82, 84, 94, 95, 99, 102, 128, 134, 142, 161, 162, 170, 171, 182], "suit": [5, 132, 145, 171], "intuit": [5, 9, 10, 12, 13, 21, 38, 44, 55, 70, 79, 80, 84, 103, 105, 107, 109, 110, 115, 123, 131, 132, 137, 138, 139, 141, 157, 158, 162, 163, 165], "debug": 5, "build": [5, 17, 28, 46, 70, 72, 73, 80, 90, 92, 97, 110, 116, 126, 129, 132, 135, 150, 161, 165, 167], "combin": [5, 12, 13, 15, 41, 42, 65, 73, 79, 81, 85, 94, 96, 106, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 123, 126, 132, 133, 135, 138, 139, 147, 148, 149, 150, 152, 154, 155, 156, 157, 172, 173, 179, 182, 183], "particularli": [5, 72, 86, 88, 117], "few": [5, 73, 74, 75, 78, 79, 83, 101, 104, 106, 107, 117, 119, 129, 132, 135, 137, 152, 158], "benefit": [5, 16, 23, 32, 81, 84, 95, 115, 118, 119, 133, 145, 165], "non": [5, 19, 28, 38, 41, 42, 43, 46, 65, 68, 73, 81, 89, 92, 96, 97, 98, 103, 104, 105, 106, 107, 108, 110, 125, 129, 133, 135, 141, 156, 157, 162, 164, 165, 171, 172, 183], "engin": [5, 39, 42, 46, 94, 105, 129, 130, 133, 135, 136, 165], "base": [5, 12, 13, 14, 15, 17, 28, 33, 35, 41, 48, 59, 73, 74, 75, 81, 87, 89, 94, 101, 103, 105, 110, 115, 118, 119, 125, 126, 129, 130, 135, 136, 137, 139, 141, 142, 147, 150, 152, 157, 158, 165, 185], "seri": [5, 91, 93, 94, 98, 100, 108, 110, 115, 137, 141, 142, 157], "threshold": [5, 14, 26, 73, 90, 107, 157, 162, 163, 173], "variou": [5, 55, 90], "attribut": [5, 36, 73, 81, 84, 94, 96, 101, 107, 108, 110, 112, 121, 131, 137, 138, 139, 141, 149, 150, 154, 156, 179, 185], "tabular": [5, 63, 70, 73, 80, 85, 165], "natur": [5, 21, 35, 80, 84, 85, 101, 105, 110, 132, 133, 154], "miss": [5, 73, 84, 94, 101, 104, 106, 107, 129, 135, 154, 185], "histgradientboostingregressor": [5, 17, 28, 116, 117, 123, 147], "classifi": [5, 14, 23, 26, 41, 68, 74, 75, 78, 79, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 94, 97, 98, 99, 106, 109, 110, 119, 125, 130, 131, 132, 136, 137, 141, 143, 146, 147, 148, 150, 151, 152, 154, 155, 157, 159, 161, 163, 171, 172, 179, 181, 185], "goto": 5, "strongli": [5, 108], "advis": [5, 108], "pointer": 5, "doc": 5, "rich": 5, "didact": [5, 35, 84, 85, 104], "improv": [5, 35, 65, 89, 90, 94, 95, 97, 98, 102, 111, 113, 117, 120, 122, 139, 145, 153, 179, 182], "compris": [5, 142], "guid": [5, 132, 141, 145, 185], "everi": [5, 73, 76, 101, 103, 105, 108, 138, 152], "explain": [5, 17, 28, 38, 57, 85, 
108, 115, 116, 122, 126, 137, 145, 150, 170], "demonstr": [5, 85, 91, 109, 115, 116, 133, 139, 161], "good": [5, 21, 24, 46, 73, 76, 78, 79, 80, 83, 84, 85, 87, 89, 99, 100, 101, 102, 106, 108, 117, 118, 119, 126, 128, 129, 133, 134, 135, 137, 138, 141, 142, 150, 152, 153, 154, 156, 157, 179], "softwar": [5, 35, 80], "ask": [5, 124, 127, 128, 131, 134, 137, 142], "question": [5, 79, 87, 89, 104, 128, 129, 130, 134, 135, 136, 137, 142, 156], "stackoverflow": 5, "github": [5, 35, 79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "discuss": [5, 13, 17, 35, 46, 73, 76, 80, 109, 115, 117, 118, 131, 133, 137, 157], "driven": [5, 137], "inclus": 5, "contribut": [5, 59, 81, 94, 133, 156, 179], "other": [5, 13, 26, 28, 35, 43, 59, 72, 73, 76, 80, 81, 84, 85, 86, 88, 91, 92, 93, 97, 98, 99, 100, 103, 105, 108, 110, 117, 125, 129, 133, 135, 136, 138, 139, 141, 145, 150, 152, 153, 154, 160, 163, 164, 179, 182, 185], "advocaci": 5, "curat": 5, "our": [5, 17, 24, 28, 41, 46, 62, 63, 70, 76, 78, 79, 80, 81, 83, 85, 86, 88, 90, 91, 92, 94, 95, 96, 97, 99, 100, 101, 102, 103, 104, 105, 106, 108, 109, 110, 113, 114, 115, 117, 122, 123, 124, 125, 127, 128, 131, 132, 133, 134, 137, 138, 139, 140, 141, 142, 143, 145, 146, 151, 153, 157, 158, 162, 164, 165, 185], "overflow": 5, "code": [5, 28, 35, 48, 68, 74, 77, 78, 86, 92, 93, 103, 110, 111, 112, 113, 114, 116, 124, 127, 128, 129, 130, 131, 133, 134, 139, 141, 143, 144, 148, 149, 150, 152, 153, 155, 156, 159, 160, 179], "start": [5, 35, 46, 73, 77, 78, 79, 81, 82, 83, 84, 85, 87, 89, 91, 94, 95, 98, 99, 100, 101, 105, 106, 115, 117, 118, 124, 125, 127, 129, 130, 131, 132, 133, 135, 136, 137, 140, 141, 142, 145, 147, 149, 151, 156, 157, 158], "carpentri": 5, "resourc": [5, 35, 70, 73, 105, 117, 152], "git": 5, "lab": [5, 35], "unsupervis": [5, 48], "structur": [5, 57, 70, 73, 81, 84, 85, 94, 103, 117, 145, 156, 157, 162, 171], "instanc": [5, 46, 63, 73, 79, 80, 81, 84, 94, 101, 104, 105, 107, 108, 111, 114, 120, 123, 124, 125, 126, 127, 129, 132, 133, 135, 138, 140, 142, 144, 145, 147, 151, 154, 175, 181, 182], "sampl": [5, 13, 14, 15, 17, 19, 28, 41, 43, 48, 58, 60, 63, 73, 74, 75, 76, 79, 80, 81, 84, 85, 92, 93, 96, 97, 98, 99, 100, 101, 102, 104, 105, 106, 107, 108, 109, 110, 113, 115, 116, 117, 119, 122, 124, 127, 128, 130, 132, 133, 134, 136, 137, 139, 140, 141, 142, 147, 149, 150, 152, 154, 156, 157, 158, 160, 161, 162, 164, 165, 174, 183], "supervis": [5, 48, 101, 172], "recov": [5, 17, 94, 133], "link": [5, 14, 78, 79, 83, 94, 104, 105, 106, 107, 108, 117, 124, 127, 142, 145], "drive": 5, "system": [5, 73, 94], "hand": [5, 28, 90, 94, 109, 122, 133, 135, 139, 150, 152], "nuanc": 5, "deep": [5, 89, 117, 118, 161], "better": [5, 16, 17, 27, 46, 72, 76, 80, 83, 87, 88, 89, 90, 91, 92, 93, 94, 96, 97, 98, 99, 101, 102, 110, 117, 118, 119, 124, 127, 128, 130, 133, 134, 136, 137, 138, 147, 151, 152, 154, 156, 161, 177, 185], "gradient": [5, 9, 12, 13, 15, 16, 17, 28, 81, 85, 109, 113, 114, 118, 122, 123, 147, 150, 154, 165], "boost": [5, 12, 13, 15, 16, 17, 28, 85, 113, 114, 118, 122, 123, 150, 154, 165], "classif": [5, 14, 21, 37, 38, 41, 48, 59, 63, 72, 73, 79, 80, 86, 88, 90, 92, 93, 94, 97, 98, 99, 101, 106, 109, 129, 130, 132, 135, 136, 140, 143, 144, 145, 146, 147, 161, 162, 163, 165, 170, 171, 172, 185], "regress": [5, 17, 21, 26, 37, 38, 39, 40, 41, 42, 43, 44, 48, 52, 57, 59, 65, 72, 79, 81, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 97, 98, 99, 101, 107, 110, 112, 115, 121, 124, 127, 129, 130, 131, 135, 
136, 137, 141, 142, 144, 147, 149, 151, 156, 157, 160, 161, 164, 165, 170, 171, 172, 177, 185], "nativ": [5, 73, 79, 84, 85, 101, 125, 136, 139, 150, 152, 154], "task": [5, 41, 72, 73, 79, 90, 101, 103, 106, 132, 141, 172], "input": [5, 23, 39, 41, 57, 72, 73, 78, 80, 81, 83, 84, 87, 88, 89, 91, 94, 97, 98, 108, 110, 128, 134, 139, 140, 141, 152, 157, 158, 172, 185], "speech": 5, "text": [5, 35, 48, 105, 132, 140], "imag": [5, 94], "voic": 5, "pretrain": 5, "human": [5, 73, 105, 132], "cost": [5, 80, 101, 105, 118, 136, 137, 153, 154], "mainten": 5, "Not": [5, 85, 86, 88, 96, 136], "look": [5, 28, 62, 63, 65, 74, 75, 77, 79, 80, 82, 84, 94, 95, 97, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 117, 129, 131, 133, 135, 137, 139, 140, 142, 150, 157, 158, 165, 179, 185], "pytorch": 5, "tensorflow": 5, "introduct": [5, 55, 70, 165], "andrea": 5, "c": [5, 14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 94, 96, 130, 132, 136, 141, 151, 163, 172, 173, 174, 175, 177, 179, 181, 185], "m\u00fcller": 5, "sarah": 5, "guido": 5, "handbook": 5, "jake": 5, "van": 5, "der": 5, "pla": 5, "broader": [5, 160, 164], "statist": [5, 17, 55, 73, 75, 80, 81, 84, 87, 89, 101, 103, 107, 109, 110, 124, 127, 142, 185], "jame": 5, "witten": 5, "hasti": 5, "tibshirani": 5, "theori": [5, 109], "concept": [5, 12, 13, 21, 22, 32, 33, 38, 39, 55, 57, 71, 94, 99, 101, 147, 170, 171, 183], "explor": [5, 46, 74, 75, 79, 81, 84, 97, 103, 112, 114, 121, 123, 132, 133, 139, 149, 150, 152, 153, 154, 156, 165, 185], "kera": 5, "aur\u00e9lien": 5, "g\u00e9ron": 5, "kaggl": 5, "particip": 5, "challeng": [5, 35, 106, 139], "team": 5, "solut": [5, 9, 10, 11, 17, 18, 29, 30, 31, 35, 37, 40, 44, 58, 62, 64, 67, 84, 100, 124, 125, 138, 150, 165, 167, 176, 178, 180], "share": [5, 110], "winner": 5, "wai": [5, 70, 72, 73, 76, 79, 83, 84, 96, 97, 98, 100, 109, 110, 115, 116, 117, 124, 125, 127, 139, 142, 145, 162, 177, 185], "now": [5, 17, 28, 46, 59, 72, 76, 77, 79, 80, 81, 82, 83, 84, 85, 86, 88, 91, 92, 93, 94, 95, 97, 98, 99, 100, 101, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 139, 140, 141, 142, 143, 146, 148, 150, 152, 153, 154, 155, 157, 159, 160, 161, 163, 164, 177, 185], "touch": 5, "briefli": 5, "fit": [5, 13, 23, 25, 28, 38, 39, 41, 42, 43, 46, 50, 63, 65, 76, 77, 79, 82, 83, 84, 87, 89, 90, 92, 96, 97, 98, 100, 101, 102, 108, 109, 110, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 128, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 141, 142, 145, 147, 150, 151, 152, 153, 154, 155, 156, 157, 159, 160, 161, 162, 163, 164, 165, 170, 174, 179], "wider": [5, 35, 52], "mai": [5, 24, 42, 43, 46, 48, 73, 79, 81, 101, 107, 117, 129, 130, 133, 135, 136, 137, 138, 141, 142, 147, 154, 156, 161, 163], "fail": [5, 100, 132, 143, 146], "weak": [5, 15, 117, 137, 154], "analysi": [5, 70, 79, 92, 97, 106, 107, 133, 145, 154, 156, 157, 165, 178], "kei": [5, 7, 8, 45, 47, 49, 51, 53, 61, 65, 66, 92, 97, 99, 102, 117, 120, 123, 125, 126, 137, 142, 150, 151, 154, 161, 166], "achiev": [5, 17, 73, 78, 81, 83, 95, 97, 106, 132, 133, 161], "reliabl": [5, 94, 141], "even": [5, 35, 38, 60, 75, 76, 80, 84, 85, 87, 88, 89, 94, 98, 100, 101, 103, 109, 116, 117, 122, 125, 132, 133, 137, 139, 141, 142, 143, 144, 146, 147, 150, 156, 182, 183], "cross": [5, 12, 17, 21, 22, 23, 24, 25, 28, 32, 33, 38, 39, 43, 46, 55, 56, 58, 59, 64, 65, 72, 84, 86, 87, 88, 
89, 90, 91, 92, 93, 94, 97, 98, 99, 100, 102, 103, 107, 108, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 130, 133, 135, 136, 142, 143, 144, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 161, 165, 170, 175, 177, 179, 182, 183, 185], "accuraci": [5, 17, 26, 59, 65, 72, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 92, 94, 96, 97, 98, 99, 106, 109, 115, 125, 126, 127, 128, 130, 131, 134, 136, 137, 141, 143, 146, 147, 150, 151, 152, 153, 154, 155, 156, 157, 159, 163, 185], "imperfect": [5, 108], "estim": [5, 12, 16, 21, 28, 32, 38, 43, 46, 55, 59, 65, 70, 76, 80, 81, 84, 85, 90, 96, 98, 100, 107, 108, 109, 110, 111, 113, 114, 116, 117, 118, 119, 120, 122, 123, 124, 125, 127, 129, 130, 133, 135, 136, 139, 144, 145, 147, 150, 151, 152, 154, 170, 177, 179, 182, 185], "actual": [5, 73, 77, 80, 82, 91, 100, 101, 102, 117, 132, 142, 145, 152], "gener": [5, 16, 17, 21, 22, 27, 28, 46, 49, 55, 57, 58, 59, 65, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 89, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 103, 104, 106, 107, 109, 110, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 129, 130, 133, 135, 136, 137, 139, 141, 142, 144, 145, 147, 148, 149, 150, 151, 152, 154, 155, 156, 162, 165, 168, 177, 179, 185], "As": [5, 28, 46, 73, 76, 79, 80, 81, 84, 85, 87, 89, 92, 94, 96, 97, 98, 103, 104, 106, 107, 108, 110, 115, 116, 132, 133, 137, 141, 142, 144, 145, 147, 151, 152, 153, 156, 157, 161, 164], "narrow": 5, "spend": [5, 105, 117], "increasingli": 5, "effort": [5, 105], "split": [5, 14, 17, 22, 28, 36, 41, 43, 65, 73, 76, 78, 83, 84, 85, 91, 94, 96, 99, 100, 101, 104, 108, 111, 115, 116, 117, 119, 120, 122, 124, 127, 142, 145, 150, 152, 154, 157, 159, 161, 162, 163, 172, 173], "afford": 5, "trust": [5, 79, 80, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 137, 138, 139, 142, 150, 151, 152, 154, 157, 163, 164], "think": [5, 74, 75, 100, 105, 129, 135, 137], "carefulli": [5, 94], "complet": [5, 14, 28, 35, 46, 96, 98, 124, 127, 132, 133, 149, 152, 156, 179], "futur": [5, 73, 76, 85, 90, 100, 101, 105, 136, 142, 150, 159, 163], "upon": [5, 90, 97, 98], "affect": [5, 100, 133, 137, 141, 150, 157, 159, 162, 163], "live": [5, 133], "sure": [5, 17, 59, 79, 81, 84, 89, 114, 123], "divers": [5, 157], "demograph": [5, 103, 118], "increas": [5, 15, 17, 27, 28, 42, 43, 46, 50, 52, 57, 75, 81, 84, 97, 101, 102, 108, 113, 116, 117, 122, 130, 133, 136, 137, 140, 141, 150, 154, 156, 157, 159, 161, 162, 163, 175, 183], "coverag": 5, "phrase": 5, "recommend": [5, 35, 70, 73, 84], "identifi": [5, 12, 70, 85, 94, 97, 102, 142, 179], "ani": [5, 14, 17, 28, 35, 42, 72, 73, 76, 79, 81, 83, 85, 88, 89, 91, 94, 97, 99, 100, 101, 102, 103, 105, 106, 107, 108, 110, 117, 118, 119, 123, 124, 125, 126, 127, 132, 133, 138, 141, 145, 150, 152, 153, 156, 161, 174, 185], "bia": [5, 36, 50, 55, 57, 108, 129, 130, 132, 135, 136, 145, 165], "acquisit": 5, "full": [5, 7, 8, 17, 35, 45, 47, 48, 49, 51, 53, 61, 65, 66, 76, 77, 81, 82, 84, 96, 101, 117, 122, 124, 126, 127, 140, 148, 152, 155, 166, 179, 182], "chain": [5, 76, 81, 85], "acquir": [5, 12, 21, 32, 38, 55, 102, 105, 170, 182], "fanci": 5, "put": [5, 32, 36, 81, 102, 108, 133, 157], "product": [5, 96, 99, 101, 129, 133, 135, 150], "routin": [5, 17, 94, 185], "debt": 5, "simpler": [5, 13, 46, 85, 137], "easier": [5, 81, 84, 91, 139, 145], "maintain": 5, "less": [5, 28, 43, 85, 94, 100, 103, 107, 108, 109, 110, 117, 118, 126, 130, 132, 133, 135, 136, 137, 139, 150, 156], "power": [5, 28, 35, 105, 117, 118, 129, 133, 135, 139, 157], 
"drift": 5, "gave": [5, 142], "methodolog": [5, 35, 55, 101, 162], "element": [5, 21, 80, 84, 94, 101, 133, 134, 140, 142], "alwai": [5, 14, 17, 21, 23, 41, 43, 52, 78, 79, 80, 83, 85, 89, 91, 94, 96, 97, 98, 99, 101, 102, 106, 107, 119, 125, 142, 151, 152, 153, 164, 177, 179, 185], "solid": 5, "conclus": [5, 73, 93, 94, 96, 98, 99, 100, 102, 103, 106, 117, 125, 136], "standpoint": 5, "biggest": 5, "shortcom": 5, "cannot": [5, 17, 28, 57, 73, 99, 100, 102, 108, 117, 124, 127, 132, 139, 142, 145, 147, 150, 153, 164, 182], "autom": [5, 73, 165, 183], "domain": 5, "knowledg": [5, 35, 70, 96, 102, 110, 127, 139, 152], "critic": [5, 35, 104], "thing": [5, 73, 84, 85, 90, 100, 139, 150], "oper": [5, 81, 110, 132, 142, 152], "risk": [5, 129, 135], "advertis": 5, "individu": [5, 16, 28, 73, 81, 85, 103, 106, 110, 112, 119, 121, 133, 141, 157, 185], "caus": [5, 17, 50, 57, 73, 84, 88, 89, 117, 133, 150, 152, 179], "wast": [5, 117], "bit": [5, 28, 83, 101, 102, 108, 133, 139, 143, 146, 152], "monei": 5, "annoi": 5, "otherwis": [5, 17, 79, 88, 94, 96, 119, 132, 139, 140, 161], "mostli": [5, 136, 137, 156], "harmless": 5, "medicin": 5, "kill": 5, "logic": [5, 136, 150, 163], "fals": [5, 15, 52, 80, 84, 87, 89, 94, 96, 100, 105, 107, 123, 125, 126, 132, 133, 135, 136, 139, 142, 146, 149, 150, 154, 156, 157, 163, 175, 177], "brain": 5, "tumor": 5, "sent": 5, "surgeri": 5, "veri": [5, 17, 55, 59, 73, 79, 85, 87, 89, 96, 97, 99, 102, 106, 107, 108, 110, 115, 117, 119, 123, 132, 133, 137, 139, 142, 150, 151, 152, 153, 154, 161, 179], "danger": [5, 133, 154], "mr": 5, "confirm": [5, 91, 99, 105, 107, 110, 124, 127, 130, 132, 136, 137, 141, 145, 152, 153], "should": [5, 17, 22, 28, 33, 34, 43, 46, 68, 72, 73, 80, 81, 85, 89, 94, 96, 98, 99, 100, 101, 102, 104, 105, 106, 108, 109, 114, 115, 117, 118, 123, 124, 125, 127, 128, 133, 134, 137, 139, 142, 143, 146, 147, 148, 150, 151, 152, 155, 157, 161, 162, 175, 177, 179, 182, 183], "person": [5, 63, 73, 79, 103, 106, 142, 150], "delai": 5, "life": [5, 73, 124, 127, 145], "save": [5, 185], "treatment": [5, 57], "hospit": [5, 24], "stai": [5, 39, 43, 110, 133], "overcrowd": 5, "unit": [5, 73, 79, 81, 84, 85, 101, 103, 105, 107, 108, 109, 133, 136, 138, 140, 145, 150, 154], "chang": [5, 27, 28, 36, 46, 81, 93, 98, 108, 109, 113, 122, 125, 133, 145, 148, 151, 155, 157, 182, 185], "inpati": 5, "chose": [5, 52, 73, 115, 133], "load": [5, 17, 63, 74, 75, 76, 77, 81, 82, 84, 85, 86, 88, 92, 95, 96, 97, 99, 100, 101, 102, 103, 106, 107, 109, 111, 116, 117, 118, 120, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 142, 150, 151, 153, 154, 157, 158, 159, 160, 161, 162, 163, 164, 179, 185], "interest": [5, 48, 73, 74, 75, 91, 93, 98, 99, 101, 102, 103, 105, 107, 110, 115, 129, 132, 135, 136, 138, 139, 141, 142, 145, 147, 150, 151, 152, 153, 154, 156, 163, 185], "focus": [5, 13, 60, 107, 109, 133, 142, 152], "easi": [5, 72, 85, 100, 101, 105, 132, 161], "accumul": 5, "target": [5, 17, 28, 38, 39, 42, 46, 48, 57, 59, 63, 65, 72, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 158, 159, 163, 177, 185], "proxi": [5, 142], "reflect": [5, 101, 110, 119], "ground": [5, 115, 142], "truth": [5, 115, 142], "polici": [5, 24], "uneven": 5, "across": [5, 41, 
46, 76, 81, 91, 108, 133, 141, 154, 185], "popul": [5, 73, 101, 103, 107, 108, 142, 156], "eg": 5, "qualiti": [5, 108, 141, 145, 152], "affair": 5, "desir": [5, 78, 83, 119, 133], "qualif": 5, "respons": 5, "women": 5, "pai": [5, 84, 109], "men": 5, "pick": [5, 28, 90, 96, 104, 105, 108, 116, 151, 182], "amplifi": 5, "inequ": 5, "mechan": [5, 28, 80, 81, 152], "die": 5, "naiv": [5, 17, 28, 73, 96, 106, 116, 119], "bad": [5, 88, 97, 127, 133, 142, 179], "health": [5, 35], "fallaci": 5, "compar": [5, 12, 16, 17, 21, 28, 36, 46, 48, 58, 59, 72, 73, 76, 78, 80, 81, 83, 84, 85, 86, 88, 96, 97, 98, 102, 107, 108, 110, 113, 115, 116, 120, 122, 125, 126, 127, 129, 130, 133, 135, 136, 142, 145, 154, 156, 157, 165, 177, 185], "wors": [5, 17, 27, 46, 72, 97, 98, 100, 138, 177], "baselin": [5, 21, 23, 78, 83, 85, 86, 88, 93, 94, 97, 98, 118, 165], "heart": [5, 28, 105, 129, 135], "pressur": 5, "greater": [5, 28, 72, 132], "trigger": 5, "care": [5, 33, 46, 84, 96, 99, 106, 107, 124, 127, 133], "which": [5, 13, 15, 16, 17, 22, 24, 26, 28, 32, 33, 39, 41, 43, 46, 57, 65, 68, 71, 72, 73, 76, 79, 80, 81, 83, 84, 85, 89, 90, 91, 92, 95, 96, 97, 98, 100, 101, 102, 103, 105, 106, 107, 108, 109, 110, 115, 116, 117, 119, 124, 125, 126, 127, 128, 129, 130, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 153, 154, 156, 157, 161, 162, 163, 170, 171, 172, 177, 179, 181, 183, 185], "learner": [5, 13, 109, 110, 115, 117, 118], "predictor": [5, 13, 14, 15, 16, 65, 80, 81, 85, 98, 102, 120, 130, 133, 136, 150, 157], "pure": [5, 127, 163], "benefici": [5, 32, 81, 117, 125, 152, 154, 161], "intervent": [5, 73], "brittl": 5, "interpret": [5, 16, 35, 103, 105, 107, 108, 131, 135, 136, 137, 138, 139, 145, 153, 157, 163], "subject": [5, 73, 142], "caution": [5, 89, 108], "feedback": 5, "loop": [5, 96, 108, 148, 150, 152, 155, 185], "todai": 5, "ai": 5, "alloc": 5, "loan": 5, "screen": [5, 7, 8, 45, 47, 49, 51, 53, 61, 66, 166], "job": [5, 81], "prioritis": 5, "treatement": 5, "law": [5, 28], "enforc": [5, 100, 105, 117, 133, 137], "court": 5, "fairlearn": [5, 73], "assess": [5, 24, 46, 76, 80, 93, 95, 96, 98, 102, 113, 122, 128, 133, 134, 142, 145, 152, 156], "shift": [5, 81, 137], "technolog": [5, 94], "induc": [5, 108, 110, 130, 136, 137], "societi": 5, "though": [5, 98, 116, 144, 147], "difficult": [5, 105, 117, 138, 145], "intersect": [5, 103, 153, 156], "No": [5, 23], "found": [5, 59, 96, 106, 107, 114, 117, 123, 133, 138, 148, 150, 152, 155, 156, 157, 161, 162, 179, 185], "short": [5, 33, 79, 107, 118, 158], "move": [5, 85, 103, 105, 136, 137, 153, 156], "choos": [5, 17, 35, 73, 86, 88, 101, 108, 124, 125, 127, 132, 133, 139, 141, 152, 175, 185], "revolut": 5, "fantast": [5, 127], "opportun": 5, "With": [5, 36, 88, 101, 105, 117, 125, 126, 142, 150, 154, 161], "lift": 5, "roadblock": 5, "hope": [5, 90, 132, 145], "empow": 5, "varieti": [5, 35, 76], "mindset": 5, "dream": 5, "being": [5, 41, 91, 141, 156], "adventur": 5, "navig": [7, 8, 45, 47, 49, 51, 53, 61, 66, 166], "slide": [7, 8, 45, 47, 49, 51, 53, 61, 66, 103, 131, 132, 137, 141, 153, 156, 166], "first": [7, 8, 28, 45, 47, 49, 51, 53, 55, 61, 62, 63, 64, 66, 70, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 114, 115, 116, 117, 118, 119, 121, 123, 124, 125, 126, 127, 129, 132, 133, 135, 138, 139, 141, 142, 143, 144, 145, 146, 147, 150, 152, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 182], "click": [7, 8, 14, 
45, 47, 49, 51, 53, 61, 66, 103, 153, 156, 166, 179], "press": [7, 8, 45, 47, 49, 51, 53, 61, 66, 166], "arrow": [7, 8, 45, 47, 49, 51, 53, 61, 66, 166], "go": [7, 8, 12, 21, 28, 32, 35, 38, 45, 47, 49, 51, 53, 55, 61, 66, 70, 76, 77, 82, 84, 92, 94, 96, 97, 100, 101, 102, 103, 105, 107, 108, 115, 118, 125, 141, 142, 147, 150, 157, 166, 170, 182], "next": [7, 8, 45, 47, 49, 51, 53, 57, 61, 66, 79, 80, 84, 90, 92, 93, 97, 98, 99, 100, 109, 115, 117, 118, 129, 132, 133, 135, 137, 139, 151, 157, 166], "previou": [7, 8, 15, 17, 21, 28, 45, 46, 47, 49, 51, 53, 59, 61, 63, 65, 66, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 93, 94, 95, 96, 98, 99, 100, 101, 102, 110, 111, 114, 115, 116, 117, 119, 120, 123, 124, 127, 128, 129, 131, 132, 133, 134, 135, 137, 138, 139, 143, 146, 148, 150, 151, 152, 153, 154, 155, 156, 157, 159, 160, 163, 164, 166, 177, 179, 182, 185], "p": [7, 8, 45, 47, 49, 51, 53, 61, 66, 94, 135, 166], "toggl": [7, 8, 45, 47, 49, 51, 53, 61, 66, 166], "mode": [7, 8, 35, 45, 47, 49, 51, 53, 61, 66, 166], "adapt": [9, 99, 118, 139, 145, 154, 165], "adaboost": [9, 12, 115, 165], "gbdt": [9, 109, 122, 165], "exercis": [9, 10, 11, 18, 28, 29, 30, 31, 35, 37, 40, 44, 46, 58, 62, 64, 67, 79, 84, 117, 132, 138, 139, 150, 152, 157, 165, 167, 176, 177, 178, 180], "m6": [9, 10, 11, 117, 165], "03": [9, 11, 20, 28, 30, 40, 44, 54, 64, 67, 165, 176], "speed": [9, 28, 33, 34, 43, 105, 108, 125, 152, 154, 165], "quiz": [9, 10, 11, 18, 19, 20, 29, 30, 35, 37, 40, 44, 54, 56, 58, 62, 64, 67, 156, 165, 167, 168, 169, 176, 178, 180], "02": [9, 10, 19, 29, 40, 58, 64, 81, 165, 167, 176, 178], "bag": [10, 12, 14, 15, 109, 111, 117, 118, 119, 120, 165], "introductori": [10, 141, 165], "forest": [10, 12, 13, 14, 16, 17, 36, 108, 112, 113, 115, 116, 118, 121, 122, 125, 126, 165], "thi": [12, 13, 17, 21, 22, 26, 28, 32, 33, 35, 38, 39, 42, 46, 48, 49, 55, 57, 59, 70, 71, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 170, 171, 174, 177, 179, 182, 183, 185], "togeth": [12, 13, 67, 72, 80, 81, 88, 93, 94, 96, 98, 109, 115, 165], "ensembl": [12, 13, 16, 17, 28, 57, 85, 87, 89, 108, 109, 110, 113, 115, 116, 117, 119, 120, 121, 122, 123, 125, 126, 147, 148, 150, 152, 154, 155], "famili": [12, 13, 38, 55, 57, 73, 79, 81, 85, 118, 136, 151], "techniqu": [12, 32, 73, 108, 132], "bootstrap": [12, 13, 17, 117, 118, 119, 120, 165], "ii": [12, 21, 76, 80, 138], "belong": [12, 17, 46, 79, 83, 84, 94, 97, 106, 141, 147, 157, 163, 177], "former": [12, 73, 81, 125, 142], "strategi": [12, 13, 17, 21, 22, 23, 24, 28, 59, 76, 78, 80, 83, 86, 88, 90, 91, 93, 94, 96, 98, 99, 100, 101, 104, 109, 110, 113, 116, 119, 122, 129, 130, 134, 135, 136, 138, 142, 145, 152, 182, 185], "later": [12, 17, 73, 79, 80, 90, 97, 101, 116, 131, 135, 137, 139, 142, 149, 150, 152, 156], "hyperparamet": [12, 16, 25, 39, 42, 46, 59, 92, 95, 96, 97, 102, 103, 111, 115, 118, 120, 127, 131, 132, 133, 137, 147, 149, 156, 157, 170, 171, 178, 179, 180, 181, 182, 183], "allow": [12, 13, 28, 43, 65, 73, 76, 79, 81, 84, 92, 97, 101, 104, 107, 108, 117, 124, 127, 128, 133, 134, 139, 140, 141, 143, 146, 152, 154, 157, 161, 162, 163, 170, 179, 182, 183], 
"skill": [12, 21, 32, 38, 55, 70, 170, 182], "carri": [12, 21, 32, 38, 46, 55, 70, 84, 95, 107, 126, 170, 182], "basic": [12, 21, 32, 35, 38, 49, 55, 70, 99, 101, 108, 120, 145, 170, 182], "usag": [12, 21, 32, 38, 55, 79, 104, 105, 106, 107, 119, 139, 152, 170, 182], "mainli": [12, 13, 21, 32, 38, 73, 104, 157, 170], "around": [12, 21, 32, 38, 73, 79, 90, 94, 97, 100, 101, 102, 105, 108, 135, 137, 142, 145, 170], "overfit": [12, 15, 17, 21, 28, 32, 38, 43, 46, 50, 52, 55, 57, 58, 59, 60, 95, 96, 100, 108, 109, 110, 113, 117, 118, 119, 122, 126, 131, 132, 133, 135, 136, 137, 152, 156, 161, 165, 170, 172, 175], "valid": [12, 17, 21, 22, 23, 24, 25, 28, 32, 33, 38, 39, 43, 46, 55, 56, 57, 59, 64, 65, 72, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 97, 98, 99, 100, 107, 108, 113, 114, 115, 116, 117, 118, 119, 122, 123, 124, 125, 126, 127, 129, 130, 133, 135, 136, 142, 143, 144, 146, 147, 148, 150, 151, 152, 153, 154, 155, 161, 162, 165, 170, 175, 177, 179, 182, 183, 185], "principl": [12, 21, 33, 142], "through": [12, 21, 28, 32, 35, 38, 55, 70, 92, 97, 102, 108, 114, 123, 133, 140, 141, 143, 146, 150, 170, 179, 182], "hour": [12, 21, 38, 55, 70, 73, 76, 78, 79, 80, 81, 83, 84, 85, 103, 130, 136, 150, 151, 154, 170, 182], "saw": [13, 28, 39, 46, 79, 81, 84, 85, 101, 102, 115, 117, 133, 138, 141, 143, 146, 150, 152, 153, 154, 157, 161, 162, 163, 171], "parallel": [13, 15, 103, 115, 152, 153, 156, 179], "sequenti": [13, 15, 52, 81, 116, 117, 153, 179], "intern": [13, 43, 65, 81, 85, 96, 101, 109, 110, 113, 114, 122, 123, 133, 145, 147, 150, 152, 179], "machineri": [13, 109, 115], "art": 13, "learn": [13, 15, 16, 22, 23, 26, 27, 28, 33, 36, 37, 39, 41, 43, 50, 57, 59, 63, 65, 67, 68, 71, 73, 76, 77, 79, 81, 82, 83, 84, 85, 86, 88, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 106, 107, 109, 112, 114, 116, 117, 119, 121, 123, 124, 125, 126, 127, 132, 133, 135, 136, 139, 141, 142, 143, 145, 146, 147, 150, 152, 153, 155, 158, 159, 162, 163, 171, 172, 173, 174, 180, 181, 183, 185], "earli": [13, 28, 113, 114, 122, 123], "stop": [13, 28, 86, 88, 91, 98, 105, 113, 114, 117, 122, 123], "stack": 13, "By": [14, 17, 28, 43, 46, 73, 81, 84, 96, 99, 102, 105, 119, 127, 130, 133, 136, 142, 145, 147, 160, 164, 177], "default": [14, 17, 26, 27, 43, 46, 59, 65, 76, 77, 81, 82, 84, 92, 97, 99, 105, 111, 117, 118, 120, 130, 132, 133, 136, 142, 143, 144, 145, 146, 147, 151, 152, 177, 182], "baggingclassifi": [14, 119], "baggingregressor": [14, 110, 111, 118, 119, 120], "draw": [14, 28, 73, 93, 98, 103, 110, 117, 125, 136, 154, 157, 179], "replac": [14, 28, 46, 84, 104, 105, 107, 110, 132], "without": [14, 35, 37, 46, 59, 73, 77, 80, 82, 91, 94, 96, 97, 100, 101, 104, 105, 106, 117, 118, 125, 126, 129, 130, 131, 133, 135, 136, 137, 138, 142, 143, 145, 146, 163, 165, 185], "d": [14, 15, 16, 17, 19, 24, 26, 28, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 73, 80, 94, 165, 172, 177, 179, 181, 185], "answer": [14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 84, 104, 129, 135, 137, 156, 172, 173, 174, 175, 177, 179, 181, 185], "hint": [14, 27, 28, 46, 59, 72, 74, 75, 77, 78, 82, 83, 87, 89, 129, 131, 133, 135, 137, 159, 163, 185], "base_estim": 14, "decid": [14, 21, 73, 79, 101, 107, 124, 127, 129, 135, 152], "resampl": [14, 50, 72, 96, 105, 109, 130, 133, 136], "perform": [14, 16, 17, 18, 21, 22, 23, 25, 28, 33, 34, 46, 59, 63, 65, 72, 75, 76, 78, 79, 80, 81, 83, 84, 85, 86, 88, 90, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 106, 108, 109, 111, 112, 113, 115, 
116, 117, 118, 119, 120, 121, 122, 124, 125, 126, 127, 129, 130, 133, 135, 136, 139, 142, 144, 145, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 161, 170, 177, 179, 182, 183, 185], "correct": [15, 16, 17, 28, 59, 80, 82, 109, 115, 117, 129, 135, 142, 148, 154, 155, 161, 185], "statement": [15, 17, 26, 28, 41, 72, 179, 185], "simultan": 15, "histogram": [15, 28, 65, 73, 74, 75, 81, 93, 98, 103, 104, 107, 114, 116, 123, 141, 150, 154], "acceler": [15, 28, 105, 116], "subsampl": [15, 107, 119, 124, 127], "origin": [15, 65, 76, 79, 80, 84, 85, 99, 100, 103, 104, 109, 110, 115, 116, 119, 129, 132, 135, 136, 137, 139, 140, 145, 157, 158, 162, 163], "bin": [15, 73, 81, 91, 94, 98, 101, 102, 104, 105, 106, 107, 116, 132, 139, 153, 154], "numer": [15, 46, 63, 67, 68, 70, 71, 72, 73, 74, 75, 76, 77, 78, 80, 82, 83, 84, 93, 98, 101, 103, 104, 105, 107, 129, 130, 132, 133, 135, 136, 139, 149, 150, 151, 156, 165, 177, 185], "tend": [15, 28, 94, 117, 118, 132, 133, 137, 145, 152], "true": [15, 17, 26, 28, 41, 46, 52, 57, 59, 65, 72, 80, 91, 94, 95, 96, 99, 100, 101, 102, 104, 105, 106, 107, 108, 110, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 125, 128, 129, 130, 132, 133, 134, 135, 136, 141, 142, 145, 147, 149, 150, 152, 156, 157, 163, 175, 179, 185], "shallow": [16, 109, 115, 117, 161], "deeper": [16, 91, 92, 95, 97, 101, 102, 109, 111, 112, 113, 116, 117, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164], "exist": [16, 73, 76, 94], "maximum": [16, 28, 81, 84, 96, 102, 139, 142, 150, 154, 157, 159, 160, 162, 163, 164, 172, 175], "depth": [16, 17, 35, 74, 75, 102, 107, 109, 115, 116, 117, 118, 129, 131, 135, 137, 141, 148, 155, 157, 158, 159, 160, 161, 162, 163, 164, 172, 175, 177], "rate": [16, 28, 80, 91, 95, 101, 105, 117, 142, 150, 153, 155], "option": [16, 81, 84, 91, 99, 101, 105, 113, 122, 182, 185], "reduc": [16, 17, 39, 43, 46, 90, 94, 95, 116, 117, 118, 119, 125, 133, 134], "sensit": [16, 50, 137, 142, 153, 156, 171], "notic": [17, 80, 81, 97, 101, 107, 108, 130, 132, 133, 135, 136, 137, 139, 141, 142, 147, 149, 156, 163], "tradit": 17, "panda": [17, 28, 35, 46, 59, 63, 70, 72, 73, 74, 75, 76, 77, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 119, 120, 121, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "pd": [17, 28, 46, 59, 63, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 112, 115, 117, 119, 120, 121, 123, 125, 126, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "read_csv": [17, 28, 46, 59, 63, 72, 73, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 97, 98, 100, 103, 104, 105, 106, 109, 112, 119, 121, 128, 129, 130, 131, 133, 134, 135, 136, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "csv": [17, 28, 46, 59, 63, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 97, 98, 100, 103, 104, 105, 106, 109, 112, 119, 121, 128, 
129, 130, 131, 133, 134, 135, 136, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 148, 150, 151, 152, 153, 154, 155, 157, 158, 159, 160, 161, 162, 163, 164, 177, 179, 185], "feature_nam": [17, 104, 107, 112, 121, 128, 130, 132, 133, 134, 136, 138, 140, 141, 157, 158, 160, 161, 162, 163, 164], "culmen": [17, 74, 75, 109, 129, 131, 135, 137, 141, 157, 158, 159, 161, 163, 185], "mm": [17, 75, 109, 112, 121, 128, 129, 131, 134, 135, 137, 138, 140, 141, 157, 158, 159, 160, 161, 162, 163, 164, 185], "flipper": [17, 112, 121, 128, 129, 134, 135, 138, 140, 158, 160, 161, 162, 164, 185], "target_nam": [17, 28, 46, 59, 72, 76, 78, 80, 81, 83, 84, 85, 86, 87, 88, 89, 90, 103, 104, 105, 112, 119, 121, 128, 129, 132, 133, 134, 135, 138, 140, 148, 150, 151, 152, 154, 155, 160, 161, 162, 164, 177, 185], "bodi": [17, 74, 75, 105, 112, 121, 128, 129, 134, 135, 138, 140, 158, 160, 161, 162, 164, 185], "mass": [17, 28, 112, 121, 128, 129, 134, 135, 138, 140, 158, 160, 161, 162, 164, 185], "dropna": [17, 129, 135, 185], "frac": [17, 28], "random_st": [17, 36, 79, 81, 83, 85, 91, 92, 96, 97, 98, 99, 100, 101, 102, 108, 109, 110, 111, 112, 113, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 126, 127, 131, 132, 133, 135, 137, 139, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 161, 163, 177], "reset_index": [17, 131, 137, 141], "drop": [17, 28, 36, 46, 59, 72, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 92, 93, 97, 98, 100, 103, 104, 105, 106, 107, 108, 119, 130, 136, 142, 143, 144, 145, 146, 147, 148, 150, 152, 154, 155, 177], "data": [17, 19, 21, 22, 23, 24, 27, 28, 35, 38, 39, 42, 43, 46, 57, 59, 65, 68, 70, 71, 72, 74, 75, 77, 78, 82, 83, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 173, 177, 179, 182, 185], "therefor": [17, 28, 72, 73, 76, 81, 83, 85, 91, 94, 95, 96, 98, 101, 102, 107, 109, 115, 116, 117, 119, 125, 126, 127, 133, 134, 135, 139, 141, 142, 145, 147, 152, 156, 161], "randomli": [17, 28, 96, 98, 101, 108, 110, 119, 154], "shuffl": [17, 36, 79, 94, 96, 99, 100, 101, 108, 114, 123, 142, 145], "break": [17, 94, 133], "spuriou": 17, "troubl": [17, 99], "notebook": [17, 22, 35, 46, 63, 72, 75, 77, 78, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 93, 94, 95, 96, 98, 99, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 113, 115, 116, 117, 118, 119, 120, 122, 125, 126, 129, 132, 133, 135, 136, 138, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 152, 153, 154, 156, 157, 158, 159, 161, 162, 163, 164, 165, 179], "outsid": [17, 150, 157, 160, 164], "scope": [17, 73, 103, 139, 141, 145, 147], "regressor": [17, 23, 27, 28, 39, 43, 46, 80, 81, 91, 95, 100, 101, 102, 110, 111, 114, 115, 116, 117, 118, 119, 120, 122, 123, 133, 139, 145, 171, 174, 177], "sklearn": [17, 28, 43, 46, 59, 72, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 159, 161, 162, 163, 164, 177, 179, 181, 185], "randomforestregressor": [17, 108, 115, 117, 119, 121, 122], "except": [17, 86, 88, 
143, 146, 157, 163], "exact": [17, 28, 115], "fold": [17, 24, 28, 46, 59, 72, 76, 94, 99, 101, 108, 114, 123, 125, 126, 127, 128, 129, 130, 133, 134, 135, 136, 144, 147, 152, 154, 156, 177, 185], "model_select": [17, 28, 46, 59, 76, 78, 79, 81, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 107, 108, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 131, 133, 135, 136, 137, 141, 142, 143, 145, 146, 147, 148, 149, 150, 151, 152, 154, 155, 156, 157, 159, 161, 163, 177, 179, 185], "cross_valid": [17, 28, 46, 59, 65, 72, 76, 84, 85, 86, 87, 88, 89, 90, 91, 92, 97, 98, 99, 102, 107, 108, 114, 115, 116, 118, 123, 125, 126, 133, 135, 136, 143, 144, 146, 147, 150, 151, 152, 177, 185], "cv": [17, 28, 46, 65, 72, 76, 85, 90, 91, 92, 94, 95, 96, 97, 98, 99, 100, 101, 102, 108, 114, 118, 123, 126, 133, 135, 136, 143, 146, 147, 150, 152, 154, 155, 161, 177, 179, 185], "store": [17, 28, 73, 79, 81, 91, 93, 98, 99, 101, 104, 105, 106, 110, 118, 125, 126, 132, 133, 138, 149, 150, 156, 185], "return_train_scor": [17, 28, 59, 102, 133], "count": [17, 46, 59, 72, 73, 75, 79, 81, 83, 84, 91, 94, 99, 104, 105, 106, 107, 130, 136, 157, 177, 185], "rang": [17, 39, 46, 59, 65, 72, 79, 81, 94, 96, 99, 101, 103, 105, 106, 107, 108, 110, 132, 133, 140, 152, 153, 154, 156, 158, 160, 163, 164, 174, 177, 179, 185], "substanti": [17, 177, 185], "almost": [17, 46, 72, 80, 89, 101, 108, 137, 140, 145, 152, 163, 177], "100": [17, 46, 59, 79, 80, 91, 95, 98, 101, 102, 107, 108, 110, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 124, 125, 126, 127, 129, 130, 131, 132, 133, 135, 136, 137, 139, 142, 145, 149, 154, 155, 156, 163, 185], "again": [17, 92, 97, 103, 108, 110, 113, 122, 137, 139, 142, 153], "curv": [17, 41, 57, 59, 92, 97, 113, 122, 135, 140, 141, 142, 165], "n_estim": [17, 109, 110, 113, 115, 116, 117, 118, 119, 120, 121, 122], "numpi": [17, 28, 35, 59, 63, 65, 70, 81, 91, 94, 95, 97, 98, 99, 102, 103, 105, 107, 108, 109, 110, 115, 116, 121, 122, 124, 125, 127, 128, 132, 133, 134, 135, 136, 138, 139, 140, 142, 144, 145, 147, 153, 156, 161, 162, 163, 164, 179], "np": [17, 28, 46, 59, 91, 92, 94, 95, 97, 98, 99, 102, 103, 105, 107, 108, 109, 110, 113, 115, 116, 119, 121, 122, 124, 125, 127, 128, 129, 132, 133, 134, 135, 136, 138, 139, 140, 142, 144, 145, 147, 149, 153, 156, 161, 162, 163, 164, 174, 179], "arrai": [17, 28, 41, 42, 59, 65, 76, 79, 80, 81, 82, 84, 85, 94, 95, 101, 102, 103, 108, 109, 110, 113, 116, 121, 122, 129, 133, 135, 136, 141, 142, 146, 150, 157], "50": [17, 28, 32, 59, 73, 77, 79, 81, 82, 91, 98, 99, 101, 102, 104, 105, 106, 107, 113, 115, 116, 117, 118, 119, 122, 129, 130, 133, 135, 136, 145, 150, 151, 153, 154], "200": [17, 59, 72, 94, 105, 115, 116, 132, 154], "500": [17, 59, 84, 85, 88, 101, 105, 107, 145, 153, 154], "1_000": [17, 28, 94, 113, 122], "decreas": [17, 28, 43, 46, 52, 57, 81, 108, 116, 125, 133, 145, 150, 156], "becom": [17, 92, 95, 97, 102, 108, 116, 117, 150, 154, 163, 179], "reach": [17, 95, 102, 113, 122, 150, 153, 161, 179], "plateau": [17, 95, 113, 122], "experi": [17, 35, 46, 59, 70, 78, 83, 92, 94, 95, 96, 97, 101, 102, 107, 113, 114, 116, 122, 123, 131, 137, 143, 146, 156, 159, 162, 163], "instead": [17, 28, 46, 59, 72, 76, 80, 81, 84, 86, 87, 88, 89, 91, 92, 94, 95, 97, 98, 101, 105, 110, 115, 117, 118, 119, 129, 130, 133, 135, 136, 139, 142, 143, 144, 145, 146, 147, 148, 149, 152, 153, 154, 155, 156, 157, 158, 159, 162, 163, 177], "max_depth": [17, 102, 109, 110, 113, 114, 115, 117, 118, 122, 123, 
139, 157, 162, 163, 164, 177], "gap": [17, 102, 133], "begin": [17, 80, 113, 122, 132, 133, 152], "consid": [17, 42, 59, 68, 72, 84, 100, 102, 104, 107, 108, 109, 115, 116, 117, 119, 132, 133, 139, 143, 146, 157, 163, 172, 181, 185], "none": [17, 46, 92, 94, 97, 101, 104, 105, 106, 107, 110, 113, 117, 118, 122, 132, 139, 153, 185], "rf_1_tree": 17, "cv_results_tre": 17, "train_scor": [17, 28, 102, 133], "return": [17, 27, 28, 41, 43, 65, 76, 79, 80, 84, 86, 88, 103, 108, 110, 115, 128, 133, 134, 140, 141, 142, 150, 151, 153, 154, 179], "83120264": 17, "83309064": 17, "83195043": 17, "84834224": 17, "85790323": 17, "86235297": 17, "84791111": 17, "85183089": 17, "82241954": 17, "85045978": 17, "perfect": [17, 41, 55, 91, 101, 108, 115, 124, 126, 127, 142, 161], "r2": [17, 27, 100, 107, 118, 123, 145, 147], "surpris": [17, 83, 94, 99, 100, 124, 127, 157, 163], "memor": [17, 80, 100, 101, 102], "expect": [17, 28, 65, 73, 75, 79, 84, 85, 96, 99, 100, 101, 107, 110, 117, 127, 129, 133, 135, 136, 145, 147, 152, 156, 161], "automat": [17, 65, 73, 79, 81, 84, 86, 88, 101, 151, 152, 177, 182], "prevent": [17, 42, 84, 88, 110, 117, 179], "max_it": [17, 28, 81, 84, 85, 88, 94, 116, 117, 123, 130, 136, 137], "recal": [17, 26, 59, 84, 96, 101, 116, 118, 131, 137, 142, 151, 152, 154, 156, 185], "averag": [17, 28, 59, 80, 91, 94, 98, 99, 101, 105, 107, 108, 110, 115, 116, 117, 118, 119, 122, 125, 133, 137, 138, 142, 145, 147, 185], "small": [17, 39, 72, 75, 76, 85, 94, 97, 99, 101, 102, 108, 110, 115, 116, 117, 129, 131, 133, 135, 137, 139, 148, 150, 155, 179], "behav": [17, 92, 97, 98, 131, 133, 137, 177], "high": [17, 28, 36, 50, 52, 57, 63, 73, 75, 79, 83, 84, 97, 102, 103, 104, 105, 106, 107, 108, 132, 137, 141, 150, 151, 154], "optimum": 17, "m7": [18, 19, 20, 29, 30, 165], "stratif": [19, 165], "framework": [21, 22, 33, 55, 56, 95, 99, 102, 114, 123, 144, 147, 152, 165, 182], "keep": [21, 28, 73, 90, 94, 101, 103, 104, 105, 107, 108, 125, 126, 127, 129, 131, 135, 136, 137, 139, 141, 152, 154], "mind": [21, 73, 101, 108, 117, 124, 125, 126, 127, 154], "metric": [21, 26, 28, 65, 76, 80, 95, 100, 101, 106, 110, 111, 113, 120, 121, 122, 125, 129, 133, 134, 135, 138, 139, 143, 144, 145, 146, 147, 153, 162, 165, 185], "besid": [21, 22, 32, 38, 79, 85, 87, 89, 91, 94, 95, 114, 123, 125, 157, 170, 183], "insight": [21, 32, 35, 46, 63, 73, 100, 102, 110, 124, 127, 140, 141, 145, 154], "addit": [21, 28, 42, 68, 76, 80, 84, 101, 105, 107, 113, 114, 118, 119, 122, 123, 133, 135, 139, 140, 142, 145, 150, 151, 152, 154, 156, 177], "necess": [21, 101], "appropri": [21, 73, 103], "nest": [21, 22, 25, 117, 133, 148, 150, 152, 155, 165, 177, 183, 185], "wise": [22, 132, 152, 153, 183], "encount": [22, 72, 84, 86, 88, 99], "show": [22, 38, 73, 74, 75, 76, 78, 79, 80, 81, 82, 83, 84, 85, 90, 95, 96, 99, 101, 102, 103, 104, 107, 109, 110, 115, 117, 118, 119, 126, 128, 129, 132, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 146, 150, 151, 152, 153, 154, 156, 157, 159, 162, 163, 164, 170, 179, 182], "comparison": [22, 39, 96, 118, 142], "remov": [23, 34, 46, 73, 105, 108, 116, 127, 129, 135, 150, 153, 185], "dummi": [23, 59, 78, 83, 88, 91, 93, 98, 142, 145], "reli": [23, 73, 79, 81, 108, 109, 126, 142], "ye": [23, 41, 42], "whatev": [23, 153], "chosen": [23, 84, 88, 106, 113, 122, 145, 183], "record": [24, 28, 48, 73, 80, 84, 101, 105, 106, 108], "suppos": [24, 84, 94], "imbalanc": [24, 59, 73, 98, 133, 142, 185], "addition": [24, 81, 103, 133], "suspect": 24, "systemat": [24, 50, 57, 72, 97, 145, 151], "bias": [24, 
132], "due": [24, 28, 83, 85, 99, 115, 119, 147, 157], "factor": [24, 28, 91, 137, 145], "devic": [24, 28], "socioeconom": 24, "most": [24, 28, 46, 59, 73, 79, 80, 83, 84, 88, 89, 92, 93, 94, 97, 98, 101, 104, 108, 109, 110, 116, 125, 130, 133, 136, 137, 138, 141, 142, 143, 146, 150, 153, 157, 163, 172, 174, 177, 179], "suitabl": 24, "abil": [24, 102, 142, 148, 155], "stratifi": [24, 93, 98, 99, 185], "leav": [24, 80, 92, 97, 103, 117, 154, 161, 163], "inner": [25, 96, 114, 123, 133, 152, 177], "outer": [25, 96, 114, 122, 123, 132, 133, 152, 177, 185], "balanc": [26, 59, 102, 117, 133, 142, 143, 145, 146, 185], "roc": [26, 142], "auc": [26, 142], "precis": [26, 28, 57, 85, 117, 140, 142, 143, 146, 150], "regular": [26, 36, 38, 39, 43, 46, 108, 110, 132, 154, 162, 165, 183], "assum": [26, 28, 42, 43, 52, 68, 88, 99, 100, 108, 133, 139, 141, 177], "logist": [26, 40, 41, 43, 65, 79, 81, 84, 85, 86, 87, 88, 89, 92, 93, 94, 97, 98, 99, 124, 127, 130, 131, 136, 137, 141, 142, 151, 157, 165], "stronger": [26, 105, 110, 137, 156], "lead": [26, 57, 73, 84, 85, 88, 94, 100, 103, 115, 117, 118, 119, 133, 135, 136, 137, 142, 149, 152, 153, 154, 156], "lower": [26, 27, 28, 46, 52, 88, 94, 95, 98, 101, 103, 105, 108, 117, 122, 133, 135, 137, 142, 145, 147, 152, 156, 161, 163], "r": [27, 97, 100, 101, 107, 108, 144, 145, 147], "absolut": [27, 28, 46, 91, 95, 101, 102, 111, 112, 113, 115, 116, 120, 121, 122, 129, 130, 134, 135, 136, 138, 139, 144, 145, 147], "median": [27, 90, 91, 96, 101, 107, 108, 118, 123, 125, 126, 133, 136, 145, 146, 173, 174], "cross_val_scor": [27, 59, 94, 96, 100, 101, 119, 127, 143, 144, 146, 147, 148, 155], "model_a": 27, "neg_mean_squared_error": [27, 133, 147], "strictli": 27, "model_b": 27, "rememb": [27, 46, 73, 103, 110, 129, 130, 135, 136, 137, 181, 185], "alia": 27, "neg": [27, 28, 46, 65, 101, 102, 103, 108, 133, 134, 140, 142], "guarante": [27, 108], "either": [27, 68, 80, 94, 97, 99, 124, 125, 127, 134, 141, 142, 145], "open": [28, 35, 46, 59, 72, 77, 79, 82, 100, 104, 105, 177], "bike_rid": [28, 105], "command": [28, 46, 59, 72, 177, 185], "cycl": [28, 105], "index_col": [28, 100, 105, 153, 154, 179], "parse_d": [28, 100, 105], "index": [28, 46, 73, 80, 94, 99, 100, 105, 108, 110, 116, 125, 129, 133, 135, 137, 141, 146, 150, 152, 157], "appendix": [28, 91, 92, 95, 97, 101, 102, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164], "remind": 28, "cheap": [28, 48, 85], "sensor": [28, 73, 105], "gp": [28, 105], "cyclist": [28, 105], "meter": [28, 105], "expens": [28, 72, 116, 150, 183], "blindli": 28, "introduc": [28, 35, 80, 81, 101, 108, 110, 125, 129, 130, 133, 135, 136, 139, 140, 145, 147, 165], "flavor": 28, "classic": 28, "newton": 28, "second": [28, 36, 72, 81, 84, 87, 89, 93, 98, 99, 105, 109, 115, 116, 132, 145, 154, 161, 163], "p_": 28, "meca": 28, "rho": 28, "sc_x": 28, "v_": 28, "c_r": 28, "mg": 28, "co": 28, "alpha": [28, 43, 46, 73, 94, 97, 107, 108, 109, 110, 115, 121, 131, 132, 133, 134, 137, 138, 139, 140, 141, 145, 157, 161, 162, 163, 164], "sin": 28, "ma": 28, "v_d": 28, "air": 28, "densiti": [28, 132], "kg": [28, 158], "m": [28, 84, 94], "frontal": 28, "c_x": 28, "drag": 28, "coeffici": [28, 36, 39, 42, 46, 107, 130, 131, 133, 135, 136, 137, 139, 140, 141, 145, 172], "v_a": 28, "roll": 28, "rider": 28, "bicycl": 28, "standard": [28, 46, 73, 76, 81, 85, 86, 88, 93, 94, 96, 98, 101, 102, 108, 114, 123, 133, 142, 152], "graviti": 28, "radian": 
28, "equat": [28, 138, 141], "complex": [28, 43, 60, 79, 85, 86, 88, 90, 95, 104, 115, 130, 136, 137, 143, 146, 161, 162], "term": [28, 73, 80, 81, 100, 101, 110, 112, 115, 116, 119, 121, 125, 130, 136, 140, 157, 162], "within": [28, 33, 76, 94, 96, 99, 101, 107, 108, 110, 114, 116, 123, 125, 132, 133, 144, 147, 152, 153, 157, 164], "parenthesi": 28, "produc": [28, 163], "fight": [28, 38], "wind": 28, "resist": 28, "tire": 28, "floor": 28, "third": [28, 115, 132, 135], "hill": 28, "forward": [28, 105], "fourth": 28, "last": [28, 46, 68, 84, 99, 104, 106, 130, 132, 133, 136, 139, 141, 142, 143, 146], "hi": [28, 106], "simplifi": [28, 46, 100, 101, 125, 141, 156, 158, 177], "beta_": 28, "closer": [28, 43, 99, 105, 131, 133, 137, 141], "previous": [28, 46, 72, 84, 85, 92, 97, 100, 104, 109, 115, 116, 117, 118, 129, 132, 135, 137, 139, 141, 145, 148, 150, 152, 154, 155, 161, 162], "part": [28, 73, 84, 85, 96, 102, 108, 116, 127, 128, 133, 134, 137, 142, 157, 159, 163], "cube": 28, "multipli": [28, 136, 147], "sine": 28, "angl": 28, "arc": 28, "tangent": 28, "arctan": 28, "ourself": [28, 142], "clip": 28, "brake": 28, "preprocess": [28, 35, 46, 59, 64, 65, 72, 73, 76, 84, 85, 86, 87, 88, 89, 90, 92, 94, 97, 98, 99, 103, 107, 108, 110, 116, 119, 131, 132, 133, 135, 136, 137, 139, 141, 145, 148, 150, 151, 152, 154, 155, 156, 165, 177, 179, 181, 185], "linear_model": [28, 43, 46, 72, 76, 79, 81, 84, 85, 86, 88, 90, 94, 98, 99, 107, 108, 110, 127, 131, 132, 133, 135, 136, 137, 138, 139, 141, 142, 145, 147, 151, 157, 162, 164, 177, 179, 181], "ridgecv": [28, 46, 107, 108, 133], "shufflesplit": [28, 91, 92, 93, 95, 97, 98, 99, 100, 101, 102, 133], "n_split": [28, 91, 95, 96, 98, 99, 100, 101, 102, 108, 123, 133, 146, 152], "mae": [28, 122, 129, 134, 135, 147], "return_estim": [28, 46, 101, 107, 108, 114, 118, 123, 125, 130, 133, 136, 152, 185], "subsequ": [28, 70, 94, 102, 114, 117, 123, 124, 127, 129, 130, 132, 133, 135, 136, 181, 182], "Be": [28, 73, 84, 85, 86, 88, 102, 117, 150, 151, 177], "awar": [28, 32, 33, 73, 80, 84, 85, 86, 88, 90, 102, 104, 117, 136, 150, 151, 177], "investig": [28, 81, 100, 108, 111, 114, 118, 120, 123, 125, 157], "consequ": [28, 99, 110, 116, 119], "003": [28, 76, 84, 87, 88, 89, 151, 152], "obtain": [28, 46, 59, 72, 76, 80, 84, 85, 86, 88, 90, 94, 96, 98, 100, 101, 105, 107, 108, 111, 117, 118, 120, 127, 131, 132, 133, 137, 138, 142, 145, 152, 153, 154, 155, 179], "closest": [28, 80, 132], "watt": [28, 105], "70": [28, 73, 90, 104], "90": [28, 73, 79, 81, 91, 94, 99, 103, 134], "neg_mean_absolute_error": [28, 91, 95, 101, 102, 115, 116, 117, 120, 122, 129, 135, 147], "request": [28, 84, 116, 133], "h": [28, 108], "beta": 28, "cadenc": [28, 105], "turn": [28, 72, 105, 129, 135], "pedal": [28, 105], "rotat": [28, 73, 94, 103, 105], "per": [28, 73, 76, 78, 79, 80, 81, 83, 84, 85, 96, 101, 103, 105, 107, 116, 119, 130, 136, 139, 142, 150, 151, 152, 154], "minut": [28, 32, 85, 105, 154], "beat": [28, 105], "1000": [28, 46, 85, 105, 117, 122, 123, 137, 142, 144, 145, 147, 156], "activ": [28, 68, 136, 153, 179], "early_stop": [28, 117, 123], "60": [28, 73, 80, 81, 90, 104, 105, 108, 110, 120, 153, 154, 161, 163], "80": [28, 72, 90, 91, 104, 148, 155], "consider": [28, 117, 140], "test_scor": [28, 76, 84, 85, 87, 88, 89, 90, 91, 94, 96, 97, 98, 99, 100, 101, 102, 107, 115, 116, 118, 123, 125, 126, 127, 133, 135, 136, 151, 152, 155, 157, 163], "dictionari": [28, 65, 76, 101], "made": [28, 34, 41, 73, 92, 94, 97, 100, 101, 106, 115, 142, 147, 151], "ignor": [28, 72, 84, 85, 87, 
88, 89, 90, 91, 108, 136, 156], "datafram": [28, 46, 59, 72, 73, 74, 75, 79, 80, 81, 84, 85, 90, 91, 93, 94, 96, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 110, 115, 117, 118, 120, 121, 123, 125, 126, 132, 133, 136, 139, 140, 141, 142, 146, 147, 150, 152, 153, 154, 156, 157, 161, 162, 163, 164, 177, 185], "account": [28, 80, 91, 94, 104, 108, 132, 136, 142, 177], "date": [28, 85, 94, 104, 105], "hesit": 28, "uniqu": [28, 59, 79, 84, 94, 99, 110, 116, 150, 154, 163], "dai": 28, "datetimeindex": [28, 105], "went": 28, "df": [28, 72, 153], "capac": [28, 95], "leaveonegroupout": [28, 100], "had": [28, 101, 108, 135, 139, 141], "indic": [28, 73, 85, 94, 101, 105, 107, 108, 110, 125, 128, 133, 134, 136, 156], "differenti": [28, 70, 73, 152, 158], "integ": [28, 68, 76, 79, 84, 86, 88, 94, 104, 106, 108, 110, 130, 136, 142, 154, 156, 185], "align": [28, 103, 106, 132, 137, 139, 154], "pessimist": 28, "optimist": [28, 80, 94, 96, 101], "deviat": [28, 46, 76, 81, 94, 96, 101, 102, 108, 114, 123, 133, 152], "analys": [28, 79, 108], "reus": [28, 128, 134, 143, 146, 152], "train_indic": 28, "test_indic": 28, "data_linear_model_train": 28, "data_linear_model": 28, "iloc": [28, 82, 99, 105, 107, 108, 109, 110, 115, 142, 150, 161], "data_linear_model_test": 28, "data_train": [28, 79, 81, 85, 100, 101, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 131, 137, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 160, 162, 163, 164], "data_test": [28, 79, 80, 81, 82, 85, 100, 101, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 131, 137, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 161, 162, 163, 164], "target_train": [28, 79, 81, 83, 85, 99, 100, 101, 110, 111, 112, 113, 115, 117, 120, 121, 122, 127, 131, 137, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 160, 162, 163, 164], "target_test": [28, 79, 80, 81, 82, 83, 85, 99, 100, 101, 111, 112, 113, 117, 120, 121, 122, 127, 131, 137, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 163], "scatter": [28, 75, 103, 107, 110, 112, 115, 121, 132, 136, 158, 160, 162, 163, 164], "catastroph": [28, 81], "portion": 28, "time_slic": 28, "slice": 28, "2020": [28, 105], "00": [28, 81, 101, 105, 134, 138, 150], "05": [28, 30, 67, 91, 94, 98, 99, 100, 105, 107, 110, 115, 118, 121, 122, 131, 133, 136, 137, 142, 152, 157, 161, 163, 165], "data_test_linear_model_subset": 28, "data_test_subset": [28, 127], "target_test_subset": 28, "pm": 28, "until": [28, 117, 142], "accur": [28, 42, 109, 133, 142], "motiv": [32, 118], "known": [32, 73, 106, 107, 133, 134, 141, 142, 145, 150, 157], "caveat": [32, 124, 127, 152, 165], "practic": [32, 57, 73, 76, 79, 80, 83, 90, 96, 98, 99, 101, 117, 119, 133, 142, 143, 145, 146, 152, 153, 154], "magic": [33, 100], "tool": [33, 35, 76, 85, 103, 110, 141, 152, 154], "margin": [33, 94, 108], "gain": [33, 35, 63, 73, 76, 78, 79, 80, 81, 83, 84, 85, 95, 103, 111, 120, 124, 125, 127, 130, 136, 141, 145, 150, 151, 154], "tackl": [33, 57, 126, 130, 136], "selector": [33, 84, 85, 86, 87, 88, 89, 124, 127, 136, 148, 150, 152, 154, 155], "recurs": 33, "main": [34, 46, 76, 81, 87, 89, 117, 125, 135, 139, 156, 165], "advantag": [34, 76, 125, 132], "fine": [34, 39, 85, 89, 152, 156, 182], "noisi": [34, 60, 102, 108, 110, 137, 156, 161], "teach": [35, 49], "beginn": 35, "strong": [35, 103, 108, 137, 156], "background": 35, "bring": 35, "vast": 35, "busi": 35, "intellig": 35, "industri": 35, "scientif": [35, 119], "discoveri": 35, "pillar": 35, "modern": 35, "field": [35, 73, 185], "central": 35, 
"easili": [35, 72, 73, 80, 81, 84, 132, 135, 136, 161], "yet": [35, 72, 85, 87, 89, 133, 139], "dovetail": 35, "ecosystem": 35, "languag": 35, "step": [35, 46, 59, 73, 79, 80, 81, 85, 90, 117, 119, 125, 126, 129, 130, 131, 133, 135, 136, 137, 139, 141, 144, 147, 150, 151, 152, 154, 185], "lesson": [35, 151], "fundament": [35, 55, 97, 145], "stone": 35, "artifici": 35, "mine": 35, "cookbook": 35, "failur": [35, 55, 136], "session": [35, 152, 154], "novemb": 35, "8th": [35, 73, 84, 136], "2023": 35, "remain": [35, 79, 80, 99, 100, 108, 125, 133, 154], "self": [35, 79, 80, 84, 96, 136, 154], "pace": [35, 101, 107], "enrol": 35, "execut": [35, 129, 135, 148, 154, 155, 179], "platform": 35, "purpos": [35, 84, 85, 96, 98, 100, 101, 102, 112, 121, 124, 125, 127, 131, 133, 137, 142, 151, 177], "educ": [35, 73, 79, 84, 85, 86, 87, 88, 89, 103, 119, 130, 136, 141, 148, 150, 152, 154, 155], "prior": [35, 70], "matplotlib": [35, 70, 73, 81, 91, 94, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 134, 136, 137, 138, 140, 141, 142, 145, 146, 157, 161, 162, 163, 164], "quick": [35, 70, 73, 104, 107, 116, 158], "publicli": 35, "cite": 35, "project": [35, 103, 153], "zenodo": 35, "archiv": [35, 90, 94], "doi": 35, "5281": 35, "7220306": 35, "repositori": [35, 101, 107], "quizz": 35, "inria": 35, "publish": [35, 101, 107], "static": 35, "rocket": 35, "top": [35, 73, 142, 153, 154, 160, 161, 164], "interact": [35, 46, 73, 103, 105, 129, 130, 133, 135, 136, 139, 153, 154, 156, 179], "cell": [35, 73, 77, 79, 81, 82, 85, 90, 96, 101, 109, 115, 116, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "binder": 35, "video": [35, 90, 172], "youtub": 35, "playlist": 35, "channel": 35, "www": [35, 73, 80, 101, 107], "pl2oka_2qdj": 35, "m44koooi7x8tu85wr4ez4f": 35, "version": [35, 81, 104, 105, 116, 118, 132, 142, 154, 159, 163], "host": [35, 154], "fun": 35, "infer": [36, 105, 125, 136, 140, 182], "importance_permut": 36, "correl": [36, 46, 73, 103, 106, 108, 119, 125, 133, 139], "divid": [36, 81, 91, 94, 102, 134, 142, 152, 154], "receiv": [36, 142], "cardin": [36, 84, 108], "independ": [36, 84, 94, 96, 100, 110, 116, 117, 132, 133, 142, 145, 151, 154], "could": [36, 59, 72, 73, 79, 80, 81, 83, 84, 85, 91, 92, 94, 96, 97, 98, 100, 101, 102, 104, 105, 106, 107, 108, 109, 116, 118, 119, 123, 125, 127, 128, 132, 133, 134, 136, 138, 139, 140, 141, 142, 143, 145, 146, 151, 152, 153, 156, 161], "m4": [37, 40, 44, 165], "parametr": [38, 128, 134, 138, 140, 154, 157, 162, 164, 171], "implic": 38, "dimension": [38, 65, 70, 73, 94, 103, 124, 127, 129, 132, 135, 137, 141, 150, 153, 157], "effect": [38, 43, 46, 58, 59, 81, 92, 96, 97, 107, 108, 110, 117, 118, 119, 131, 136, 137, 159, 162, 163, 165], "relationship": [38, 73, 79, 84, 85, 100, 103, 108, 110, 132, 133, 136, 139, 140, 142, 150, 152, 154, 158], "adjust": [39, 46, 57, 84, 129, 135, 150, 153, 157], "successfulli": [39, 98, 115, 132], "scale": [39, 43, 46, 59, 71, 72, 81, 84, 85, 92, 94, 97, 101, 110, 116, 130, 132, 136, 145, 150, 151, 154, 156, 177, 183, 185], "approxim": [39, 59, 76, 80, 81, 89, 91, 94, 98, 110, 122, 132, 133, 135, 137, 156, 185], "dynam": 39, "linearli": [39, 41, 43, 79, 103, 132, 139], "extra": [39, 72, 105, 137, 143, 146, 152], "Is": [41, 42, 78, 83, 104], "linearregress": [41, 43, 133, 135, 138, 139, 145, 147, 162, 164, 177], "coef_": [41, 42, 46, 107, 108, 126, 131, 133, 136, 137, 138, 139, 141, 151, 182], "intercept_": [41, 42, 138, 139], "boundari": [41, 132, 141, 157, 159, 
161, 162, 163], "predict_proba": [41, 43, 81, 131, 132, 137, 141, 142, 147, 150, 157, 159, 163], "probabl": [41, 90, 101, 107, 108, 109, 131, 132, 135, 137, 147, 157, 179], "extract": [42, 76, 94, 105, 108, 110, 133, 150, 154, 185], "straight": [42, 85, 132, 137, 139, 141, 162], "float": [42, 105, 107, 116, 130, 136, 154], "express": [42, 46, 60, 72, 89, 92, 97, 101, 107, 108, 115, 129, 132, 135, 137, 139, 153, 156, 161, 179], "ensur": [42, 43, 94, 96, 99, 103, 117, 131, 132, 137], "extrapol": [42, 132, 160, 164, 171], "regardless": [42, 103, 132, 133, 157], "inher": [42, 108], "robust": [43, 89, 101, 108, 118, 133], "outlier": [43, 91, 107, 133, 147, 161], "wide": [43, 79], "forc": [43, 81, 87, 89, 109, 116, 119, 133, 138], "penal": [43, 133], "scientist": [43, 96], "prepar": 43, "plan": [43, 159, 163], "strength": [43, 46, 131, 133, 137, 154], "penalti": [43, 46, 108, 137], "magnitud": [43, 108, 130, 131, 133, 136, 137, 172], "l2": 43, "confid": [43, 131, 132, 137, 141, 142], "ames_housing_no_miss": [46, 72, 104, 133, 177], "ames_h": [46, 72, 90, 104, 133, 144, 145, 147, 177], "salepric": [46, 72, 90, 104, 133, 144, 145, 147, 177], "numerical_featur": [46, 72, 104, 177], "lotfrontag": [46, 72, 90, 104, 133, 177], "lotarea": [46, 72, 90, 104, 133, 177], "masvnrarea": [46, 72, 104, 177], "bsmtfinsf1": [46, 72, 104, 177], "bsmtfinsf2": [46, 72, 104, 177], "bsmtunfsf": [46, 72, 104, 177], "totalbsmtsf": [46, 72, 104, 177], "1stflrsf": [46, 72, 104, 177], "2ndflrsf": [46, 72, 104, 177], "lowqualfinsf": [46, 72, 104, 177], "grlivarea": [46, 72, 104, 177], "bedroomabvgr": [46, 72, 104, 177], "kitchenabvgr": [46, 72, 104, 177], "totrmsabvgrd": [46, 72, 104, 177], "fireplac": [46, 72, 104, 177], "garagecar": [46, 72, 104, 177], "garagearea": [46, 72, 104, 177], "wooddecksf": [46, 72, 104, 177], "openporchsf": [46, 72, 104, 177], "enclosedporch": [46, 72, 104, 177], "3ssnporch": [46, 72, 104, 177], "screenporch": [46, 72, 90, 104, 177], "poolarea": [46, 72, 90, 104, 133, 177], "miscval": [46, 72, 90, 104, 177], "data_numer": [46, 76, 78, 79, 81, 83, 177], "largest": [46, 107], "1e0": 46, "000": [46, 48, 72, 88, 91, 99, 101, 107, 115, 116, 124, 127, 141, 145, 155, 156], "1e5": 46, "larger": [46, 81, 84, 102, 117, 122, 135, 137, 139, 140, 148, 149, 152, 155, 156, 157, 174], "notat": 46, "box": [46, 96, 107, 114, 123, 125, 126, 133, 136, 143, 146, 185], "garag": 46, "just": [46, 88, 100, 101, 102, 105, 108, 109, 110, 113, 117, 119, 122, 133, 136, 139, 141, 142], "logspac": [46, 92, 97, 107, 133, 149, 156], "num": [46, 73, 79, 84, 85, 86, 87, 88, 89, 90, 91, 92, 95, 97, 98, 103, 107, 110, 115, 119, 121, 128, 130, 133, 134, 136, 138, 140, 148, 149, 150, 152, 154, 155, 156], "101": [46, 185], "alpha_": [46, 133], "fall": [46, 101, 145, 154], "preprocessor": [46, 85, 86, 87, 88, 89, 90, 94, 104, 119, 130, 136, 148, 150, 151, 152, 154, 155, 177, 185], "deal": [46, 80, 84, 85, 89, 99, 100, 103, 106, 124, 127, 132, 135, 139, 141, 147, 158, 177], "onehotencod": [46, 71, 72, 84, 85, 86, 87, 88, 89, 90, 130, 133, 136], "categorical_featur": [46, 90, 104], "yield": [46, 89, 117, 138], "long": [46, 89, 104, 105, 107, 129, 135, 151], "splinetransform": [46, 132, 139], "influenc": [46, 95, 102, 108, 133, 139, 145, 154, 170], "nystroem": [46, 129, 131, 132, 135, 137, 139], "kernel": [46, 92, 97, 129, 131, 132, 135, 137, 139], "poli": [46, 129, 132, 135, 139], "n_compon": [46, 129, 131, 132, 135, 137, 139], "300": [46, 81, 105, 110, 115, 117, 121, 128, 134, 138, 140, 161], "studi": [48, 59, 73, 92, 94, 97, 101, 
185], "apart": [48, 108], "estat": [48, 101], "thousand": [48, 101, 107, 108], "entertain": 48, "spaciou": 48, "updat": [48, 90, 185], "bedroom": [48, 101, 107, 108], "bathroom": 48, "lakeview": 48, "97630": 48, "1st": [48, 73, 84, 135], "nightlif": 48, "privat": [48, 73, 79, 84, 85, 136, 150, 154], "backyard": 48, "buyer": 48, "market": 48, "kind": [48, 73, 85, 89, 108, 122, 139, 145, 172, 185], "sub": [49, 96, 97, 157], "vocabulari": 49, "varianc": [50, 55, 57, 94, 102, 108, 145, 165], "low": [52, 63, 73, 75, 79, 83, 94, 103, 105, 107, 108, 110, 117, 132, 137, 142, 157, 161, 179], "littl": [52, 79, 94, 99, 153], "reduct": [52, 108], "steadi": 52, "label": [52, 57, 68, 75, 79, 84, 85, 88, 98, 99, 100, 103, 109, 110, 115, 121, 134, 136, 140, 141, 142, 143, 146, 162, 164], "slow": [52, 89, 117], "tradeoff": [52, 57, 102], "m2": [54, 56, 58, 165], "trade": [55, 57, 76, 131, 132, 137, 161, 165, 170, 172], "off": [55, 57, 73, 76, 91, 104, 131, 132, 137, 142, 161, 165, 170, 172], "character": [55, 108, 142], "why": [55, 59, 65, 73, 85, 93, 98, 105, 108, 137, 147, 161, 179], "aris": [55, 73], "Then": [55, 70, 76, 81, 85, 91, 101, 110, 114, 118, 123, 124, 125, 126, 127, 131, 132, 137, 139, 141, 143, 144, 145, 146, 147, 152, 154, 157], "quantifi": [55, 73, 102, 108, 128, 134, 185], "contrast": [55, 73, 84, 101, 115, 140, 162], "importantli": 55, "emphas": [55, 118], "happen": [57, 65, 73, 86, 88, 119, 135, 157], "suffer": [57, 81, 106], "lack": 57, "captur": [57, 73, 102, 108, 132, 133], "neither": [57, 99], "nor": 57, "still": [57, 76, 81, 84, 85, 89, 102, 107, 108, 109, 111, 120, 131, 132, 133, 137, 139, 145, 153, 154, 157], "variat": [57, 76, 101, 102, 108, 110, 133, 145], "fulli": [57, 84, 101, 113, 117, 122], "determin": [57, 84, 92, 97, 142, 145], "irreduc": 57, "decompos": 57, "chapter": [57, 185], "diagnos": 57, "blood_transfus": [59, 92, 97, 106, 142, 143, 146], "propos": [59, 185], "multiclass": [59, 157, 159, 163, 185], "proport": [59, 95, 98, 106, 137, 142, 145, 185], "twice": [59, 142, 185], "value_count": [59, 73, 74, 75, 83, 84, 99, 104, 106, 142, 185], "dummyclassifi": [59, 78, 83, 88, 93, 98, 142], "most_frequ": [59, 83, 88, 98, 104, 142], "balanced_accuraci": [59, 142, 143, 146, 185], "remaind": [59, 84, 85, 87, 89, 119, 148, 150, 152, 154, 155], "add": [59, 105, 108, 115, 116, 125, 129, 133, 135, 139, 140, 142, 143, 146, 160, 164], "faster": [59, 65, 73, 81, 117, 148, 155], "distanc": [59, 81, 103, 137, 156], "normal": [59, 81, 90, 94, 103, 104, 105, 106, 107, 108, 116, 141, 142, 145, 149, 150, 156, 157, 163], "irrelev": 59, "make_pipelin": [59, 65, 76, 81, 84, 85, 86, 87, 88, 89, 94, 97, 98, 99, 104, 107, 108, 110, 116, 119, 124, 125, 126, 127, 129, 131, 132, 133, 135, 136, 137, 139, 141, 156], "get_param": [59, 92, 97, 111, 120, 131, 137, 151, 181, 185], "n_neighbor": [59, 77, 82, 149, 156, 182, 185], "clearli": [59, 83, 95, 97], "param_rang": [59, 97, 102, 113, 122, 129, 135], "affirm": 59, "highli": [60, 73, 132], "much": [60, 73, 88, 89, 91, 98, 101, 102, 108, 116, 117, 119, 120, 125, 132, 135, 179], "m1": [62, 64, 67, 165], "adult_censu": [63, 73, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 93, 98, 103, 119, 130, 136, 148, 150, 151, 152, 153, 154, 155], "comma": [63, 73, 104, 105, 106], "file": [63, 73, 79, 80, 104, 105, 106, 143, 146, 185], "alreadi": [63, 73, 79, 80, 81, 85, 97, 108, 117, 119, 132, 133, 149, 154, 156], "packag": [63, 116, 133, 143, 146], "survei": 63, "incom": [63, 73, 79, 83, 91, 101, 103, 107, 108], "salari": [63, 107], "seaborn": [63, 73, 
74, 75, 81, 103, 105, 106, 107, 108, 109, 110, 115, 121, 131, 134, 137, 138, 139, 140, 141, 150, 153, 157, 158, 161, 162, 163, 164], "visual": [63, 67, 72, 81, 84, 95, 97, 100, 101, 103, 105, 110, 115, 128, 130, 132, 133, 134, 136, 141, 142, 145, 150, 153, 157, 158, 162, 163, 165, 182], "scipi": [63, 103, 117, 120, 154], "organ": [63, 163], "five": [65, 80, 85, 101], "overlap": [65, 75, 76, 110, 147, 154], "lie": 65, "fewer": [65, 145], "jupyt": [67, 79, 81, 82, 85, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164, 165], "ordin": [68, 72, 85, 88, 103, 116], "string": [68, 79, 84, 85, 86, 88, 101, 104, 143, 146, 147, 152, 185], "meaning": [68, 84, 89, 124, 126, 127, 136, 140, 145, 150], "hot": [68, 84, 85, 130, 132, 136], "represent": [68, 79, 81, 82, 84, 85, 87, 89, 90, 95, 96, 101, 106, 107, 109, 116, 119, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "compani": [68, 100], "sector": 68, "construct": [68, 110, 119, 154], "retail": 68, "energi": [68, 100, 105], "insur": 68, "phone": 68, "sale": [68, 72, 85, 136], "depart": 68, "employe": 68, "profit": 68, "quarter": [68, 100], "head": [68, 72, 73, 75, 79, 80, 84, 85, 101, 104, 105, 106, 107, 108, 129, 135, 140, 150, 151, 154, 158], "tabl": [70, 73, 117, 119, 150], "progress": [70, 149, 156], "attent": [70, 109], "extend": [70, 79], "mix": [70, 71, 73, 85, 94, 161], "unknown": [70, 84, 86, 88, 119, 145], "notabl": [71, 142], "ordinalencod": [71, 84, 85, 86, 87, 88, 89, 119, 148, 150, 152, 154, 155, 177], "200_000": [72, 90], "astyp": [72, 90, 104, 132, 149, 154, 156, 161], "int": [72, 76, 90, 109, 154], "did": [72, 73, 81, 84, 93, 98, 100, 101, 104, 105, 117, 125, 127, 132, 137, 139, 142, 143, 146, 150, 151, 152, 154, 156, 159, 163, 182], "convert": [72, 101, 103, 109, 110, 121, 156], "info": [72, 80, 94, 104, 105, 106, 107], "examin": [72, 133], "36": [72, 75, 101, 104, 117, 120, 129, 135, 154, 158, 162], "select_dtyp": [72, 104, 130, 136, 144, 145, 147], "make_column_selector": [72, 84, 85, 86, 87, 88, 89, 119, 136, 148, 150, 152, 154, 155], "shown": [72, 73, 101, 115, 132, 137, 141, 145, 152, 159, 163], "among": [72, 84, 85, 99, 125, 156], "quantit": [72, 79, 115, 128, 134, 138], "exclud": [72, 73, 125], "overallqu": [72, 104], "overallcond": [72, 104], "yearbuilt": [72, 104, 133], "sole": [72, 127, 145], "treat": [72, 85, 90, 133, 142], "issu": [72, 73, 84, 85, 99, 100, 101, 105, 106, 116, 132, 133, 139], "rare": [72, 73, 84, 85, 104, 119, 133], "handle_unknown": [72, 84, 85, 86, 87, 88, 89, 90, 119, 136, 148, 150, 152, 154, 155], "mere": 72, "chanc": [72, 98, 101, 110, 124, 127, 142, 163], "partit": [72, 76, 143, 146, 157, 159, 161, 162, 163], "classifact": 72, "li": [72, 84, 110], "place": [73, 127, 133, 139], "workflow": 73, "1994": [73, 94, 103], "download": [73, 101, 107], "openml": [73, 80], "webpag": 73, "1590": [73, 80], "manipul": [73, 77, 82, 92, 97, 101], "tutori": 73, "earn": [73, 103, 150], "50k": [73, 78, 79, 80, 81, 82, 83, 85, 103, 150, 154], "year": [73, 79, 103, 104, 133, 150], "heterogen": [73, 79, 85, 104, 133], "employ": 73, "covari": 73, "workclass": [73, 79, 84, 85, 136, 150, 152, 154], "marit": [73, 79, 84, 85, 136, 150, 152, 154], "occup": [73, 79, 84, 85, 107, 108, 136, 150, 152, 154], "race": [73, 79, 84, 85, 105, 136, 150, 152, 154], "sex": [73, 79, 84, 85, 136, 150, 152, 154], "loss": [73, 76, 78, 79, 80, 81, 83, 84, 85, 103, 130, 136, 145, 147, 150, 151, 154], "week": [73, 76, 78, 79, 80, 81, 83, 84, 85, 103, 130, 136, 150, 151, 154], "countri": [73, 79, 84, 85, 136, 
150, 152, 154], "11th": [73, 79, 84, 136, 150, 154], "marri": [73, 79, 84, 85, 136, 150, 154], "op": [73, 79, 84, 136, 150, 154], "inspct": [73, 79, 84, 136, 150, 154], "own": [73, 79, 84, 85, 117, 136, 139, 150, 154], "child": [73, 79, 84, 85, 136, 150, 154], "male": [73, 79, 84, 85, 136, 150, 154], "lt": [73, 79, 80, 154], "hs": [73, 79, 84, 85, 136, 150, 154], "grad": [73, 79, 84, 85, 136, 150, 154], "civ": [73, 79, 84, 85, 136, 150, 154], "spous": [73, 79, 84, 85, 136, 150, 154], "farm": [73, 79, 84, 136, 150, 154], "fish": [73, 79, 84, 136, 150, 154], "husband": [73, 79, 84, 85, 136, 150, 154], "white": [73, 79, 84, 85, 103, 131, 132, 136, 137, 141, 150, 154, 163], "local": [73, 79, 84, 90, 106, 136, 139, 150, 154], "gov": [73, 79, 84, 136, 150, 154], "assoc": [73, 79, 84, 136, 150, 154], "acdm": [73, 79, 84, 136, 150, 154], "protect": [73, 79, 84, 136, 150, 154], "serv": [73, 79, 84, 87, 89, 105, 136, 142, 150, 154], "gt": [73, 79, 154], "colleg": [73, 79, 84, 136, 150, 154], "7688": [73, 79, 130, 136, 150, 151, 154], "femal": [73, 79, 84, 85, 136, 150, 154], "revenu": [73, 83, 84, 103, 133], "target_column": [73, 109, 131, 137, 141, 157, 158, 159, 163], "37155": [73, 83], "11687": [73, 83], "dtype": [73, 75, 79, 80, 81, 82, 83, 84, 85, 86, 88, 91, 98, 101, 103, 104, 105, 106, 107, 109, 133, 141, 142, 150, 154, 156, 157], "int64": [73, 75, 79, 83, 84, 104, 106, 156], "imbal": [73, 106, 137], "special": [73, 105], "healthi": 73, "ill": [73, 133], "numerical_column": [73, 76, 78, 79, 81, 83, 85, 87, 89, 130, 136, 151], "categorical_column": [73, 84, 85, 86, 87, 88, 89, 130, 136, 150, 152, 154], "all_column": 73, "print": [73, 76, 79, 80, 81, 82, 83, 84, 85, 87, 88, 89, 90, 94, 96, 97, 99, 100, 101, 107, 108, 109, 110, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 125, 127, 128, 133, 134, 135, 136, 138, 139, 140, 141, 142, 143, 145, 146, 147, 149, 150, 151, 152, 154, 155, 156, 157, 163], "48842": [73, 79, 84, 130, 136, 150, 154], "14": [73, 91, 94, 104, 105, 116, 117, 120, 132, 142, 145, 150, 154, 156], "subtract": [73, 81], "mayb": [73, 94, 102, 107], "peculiar": [73, 104], "malfunct": 73, "afterward": [73, 80, 108], "cap": [73, 96, 107, 123, 125, 126, 133, 136, 146], "hist": [73, 75, 91, 94, 98, 101, 102, 104, 105, 106, 107, 141], "figsiz": [73, 75, 103, 104, 105, 106, 107, 108, 132, 133, 136, 142, 145, 157, 161, 162, 163], "func": [73, 101, 107], "assign": [73, 79, 81, 84, 98, 104, 109, 115, 132, 139, 141, 144, 147], "underscor": [73, 81, 151], "garbag": 73, "comment": 73, "retir": 73, "filter": [73, 84, 105, 150], "peak": 73, "ll": 73, "32650": 73, "16192": 73, "disproport": 73, "fair": [73, 96, 118], "deploi": [73, 85, 101, 152, 162], "reader": [73, 110, 136, 139, 141, 145], "mitig": [73, 117], "deploy": [73, 152], "compon": [73, 129, 135, 139, 157, 181], "unexpect": [73, 99], "gender": 73, "beyond": [73, 103, 139, 141, 145, 147], "15784": 73, "10878": 73, "bachelor": [73, 84, 85, 136], "8025": 73, "master": [73, 84, 136], "2657": 73, "voc": [73, 84, 136], "2061": 73, "1812": 73, "1601": 73, "10th": [73, 84, 136], "1389": 73, "7th": [73, 84, 136], "955": 73, "prof": [73, 84, 85, 136], "school": [73, 84, 94, 136], "834": 73, "9th": [73, 84, 136], "756": [73, 116], "12th": [73, 84, 136], "657": 73, "doctor": [73, 84, 136], "594": 73, "5th": [73, 84, 135, 136], "6th": [73, 84, 136], "509": 73, "4th": [73, 84], "247": 73, "preschool": [73, 84], "83": [73, 116, 120], "crosstab": 73, "entri": [73, 76, 101, 104, 105, 106, 107, 114, 123, 128, 134, 141], "lose": 73, "redund": [73, 101, 
125, 129, 130, 135, 136, 139], "upcom": [73, 142, 151], "latter": [73, 81, 96, 125, 142], "pairplot": [73, 74, 75, 103, 105, 106, 107, 108, 158], "diagon": [73, 103, 106, 132, 142, 145, 150, 158], "reveal": [73, 101], "sn": [73, 81, 105, 106, 107, 108, 109, 110, 115, 121, 131, 134, 137, 138, 139, 140, 141, 150, 153, 157, 158, 161, 162, 163, 164], "readabl": [73, 150, 153, 179], "n_samples_to_plot": 73, "5000": [73, 106, 125, 126, 134, 136, 140], "var": 73, "hue": [73, 75, 105, 106, 107, 109, 131, 137, 141, 153, 157, 158, 161, 163], "plot_kw": [73, 108], "height": [73, 75, 129, 135], "diag_kind": [73, 108], "diag_kw": 73, "written": [73, 94, 107], "scatterplot": [73, 81, 107, 109, 110, 115, 121, 131, 134, 137, 138, 139, 140, 141, 153, 157, 158, 161, 162, 163, 164], "region": [73, 101, 102, 132, 137, 141, 150, 154], "pyplot": [73, 81, 91, 94, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 134, 136, 137, 138, 141, 142, 145, 146, 157, 161, 162, 163, 164], "plt": [73, 81, 91, 94, 96, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 115, 121, 123, 125, 126, 131, 132, 133, 134, 136, 137, 138, 141, 142, 145, 146, 157, 161, 162, 163, 164], "ax": [73, 81, 103, 104, 105, 108, 109, 119, 131, 132, 133, 134, 136, 137, 139, 140, 142, 145, 150, 153, 157, 161, 162, 163], "age_limit": 73, "axvlin": [73, 107, 108], "ymin": [73, 107], "ymax": [73, 107], "linestyl": [73, 107, 110, 121, 131, 132, 137, 142, 162], "hours_per_week_limit": 73, "axhlin": 73, "xmin": 73, "xmax": 73, "annot": [73, 150], "fontsiz": 73, "AND": [73, 136], "seem": [73, 76, 79, 82, 83, 87, 89, 97, 100, 103, 107, 108, 117, 119, 122, 133, 134, 145, 151, 157], "complic": [73, 107, 125], "similarli": [73, 92, 93, 95, 97, 98, 100, 103, 112, 117, 119, 121, 133, 141, 142, 154, 164], "somewhat": [73, 110], "arbitrari": [73, 84, 85, 86, 88, 89, 90, 108, 140], "straightforward": [73, 110], "obviou": [73, 94, 105], "highlight": [73, 80, 84, 94, 96, 100, 109, 115, 124, 125, 127, 133, 142, 150, 152, 157, 162], "imagin": [74, 75, 108], "feel": [74, 75, 84, 114, 117, 123, 133, 147, 185], "penguins_classif": [74, 75, 109, 131, 137, 141, 157, 158, 159, 161, 163], "There": [75, 81, 101, 103, 106], "39": [75, 84, 85, 104, 116, 118, 120, 129, 135, 154, 158], "adeli": [75, 131, 137, 141, 157, 158, 163], "151": [75, 84, 153], "gentoo": [75, 157, 158, 163], "123": 75, "chinstrap": [75, 131, 137, 141, 157, 158], "pairplot_figur": [75, 158], "prioriti": 75, "tweak": 75, "subfigur": 75, "perfectli": [75, 89, 103, 106, 115, 117, 139, 145], "downsid": [76, 135], "amount": [76, 94, 101, 106, 119, 133], "smaller": [76, 101, 116, 117, 122, 135, 137, 147, 174], "repetit": [76, 94, 150], "aggreg": [76, 107, 114, 123, 133, 142], "clone": [76, 110], "earlier": [76, 85, 107, 118, 142, 157], "computation": [76, 116, 139, 150, 179], "intens": [76, 98, 179], "cv_result": [76, 84, 85, 87, 88, 89, 90, 97, 101, 102, 107, 117, 118, 120, 125, 126, 133, 135, 147, 150, 151, 152, 153, 154, 156, 179], "cpu": [76, 85, 102, 115, 118, 150, 154], "454": 76, "ms": [76, 85, 102, 118, 150, 154], "sy": [76, 85, 102, 118, 150, 154], "259": 76, "total": [76, 85, 94, 100, 101, 102, 104, 105, 106, 107, 108, 118, 130, 135, 136, 142, 150, 154, 156], "713": 76, "wall": [76, 85, 102, 118, 150, 154], "402": 76, "fit_tim": [76, 84, 85, 97, 101, 115, 116, 125, 146, 152], "05886054": 76, "05734253": 76, "05754614": 76, "05606604": 76, "05529737": 76, "score_tim": [76, 84, 85, 97, 101, 115, 116, 125, 146, 152], "01399994": 76, 
"0136869": 76, "01379323": 76, "01350665": 76, "01337504": 76, "79557785": 76, "80049135": 76, "79965192": 76, "79873055": 76, "80456593": 76, "iii": 76, "distinct": [76, 79, 96, 99], "match": [76, 77, 82, 96, 141], "stabil": [76, 108], "discard": [76, 101, 107, 109, 157], "round": [76, 93, 98, 109], "themselv": 76, "3f": [76, 79, 80, 81, 83, 84, 85, 87, 88, 89, 90, 94, 96, 97, 99, 107, 115, 116, 118, 119, 121, 123, 127, 128, 134, 135, 141, 142, 145, 146, 147, 151, 152, 155, 157], "std": [76, 79, 81, 84, 85, 87, 88, 89, 90, 91, 94, 96, 97, 99, 100, 101, 107, 108, 110, 115, 116, 118, 119, 123, 129, 133, 135, 146, 147, 151, 152], "800": [76, 151], "crucial": [76, 108, 117], "bar": [76, 99, 104, 108, 133, 141, 157], "decim": 76, "trustworthi": [76, 96], "compat": [76, 152], "familiar": [77, 82, 107, 114, 123, 144, 147], "conveni": [77, 82, 133], "directli": [77, 80, 82, 85, 108, 115, 136, 139, 147, 157], "insid": [77, 82, 84, 104, 143, 146, 177], "pager": [77, 82], "roughli": [78, 83, 110, 122, 140], "simplest": [78, 83], "irrespect": [78, 83, 98, 132, 179, 185], "train_test_split": [78, 79, 81, 83, 85, 100, 101, 108, 111, 112, 113, 117, 120, 121, 122, 124, 127, 131, 137, 141, 142, 145, 148, 149, 150, 152, 154, 155, 156, 157, 159, 163], "behavior": [78, 83, 131, 133, 137, 142, 162], "oversimplifi": 79, "exclus": [79, 132], "helper": [79, 81, 84, 85, 128, 134, 139, 143, 146], "duplic": [79, 84, 85, 110, 150, 154], "48837": [79, 84, 130, 136, 150, 154], "48838": [79, 84, 130, 136, 150, 154], "48839": [79, 84, 130, 136, 150, 154], "48840": [79, 84, 130, 136, 150, 154], "48841": [79, 84, 130, 136, 150, 154], "explicit": [79, 80, 101, 139, 144, 147], "At": [79, 96, 99, 101, 110, 142, 159, 163], "moreov": 79, "o": [79, 163], "explanatori": [79, 100], "000000": [79, 81, 91, 105, 107], "643585": 79, "710510": 79, "min": [79, 81, 91, 105, 107, 110, 128, 133, 134, 138, 140, 161, 162, 164, 174], "37": [79, 80, 81, 84, 101, 104, 107, 108, 117, 153, 154], "max": [79, 81, 91, 105, 107, 110, 128, 133, 134, 138, 139, 140, 155, 161, 162, 164, 174, 179], "float64": [79, 91, 98, 101, 104, 105, 106, 107, 141], "unusu": 79, "memori": [79, 100, 104, 105, 106, 107, 135, 139, 151, 152], "test_siz": [79, 91, 95, 98, 101, 102, 111, 113, 120, 122, 131, 137, 142, 152], "determinist": [79, 98, 139], "specifi": [79, 84, 85, 104, 105, 119, 140, 152, 154, 182], "quickli": [79, 103, 104, 107, 108, 117, 139, 141, 142, 153, 154, 163], "got": [79, 114, 123, 164], "1f": [79, 110, 147], "12211": 79, "36631": [79, 81], "cours": [79, 84, 104, 125, 128, 134, 139, 143, 146, 159, 163], "environ": [79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "pleas": [79, 81, 82, 85, 90, 93, 96, 98, 101, 105, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164, 177, 185], "rerun": [79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "unabl": [79, 81, 82, 85, 90, 96, 99, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "render": [79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "nbviewer": [79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "nbsp": [79, 81, 82, 85, 90, 96, 101, 109, 132, 133, 136, 138, 139, 142, 150, 152, 154, 157, 163, 164], "logisticregressionifittedlogisticregress": [79, 142, 157], "807": [79, 81], "fraction": [79, 102, 117, 142, 145], "correctli": [79, 89, 90, 99, 109, 142], "visit": 80, "glossari": [80, 165], "fed": 
80, "73": [80, 104], "3273": 80, "side": [80, 91, 119, 122, 152, 162], "39068": 80, "39069": 80, "39070": 80, "39071": 80, "39072": 80, "39073": 80, "linger": [80, 84, 94, 101], "denomin": 80, "major": [80, 83, 98, 137], "seldom": 80, "target_predict": [80, 100, 101, 109, 120, 121, 128, 134, 139, 142, 145, 161, 162], "sake": [80, 90, 93, 98, 117, 139, 152, 157, 177], "simplic": [80, 90, 93, 98, 139, 157, 177], "agre": [80, 122, 141], "bool": [80, 104, 142], "mistak": [80, 109, 127, 137, 142], "success": [80, 81, 115, 163], "8242776341719346": 80, "harder": [80, 125], "conclud": [80, 86, 88, 98, 110, 137, 157], "ones": [80, 84, 137, 150, 151, 154], "adult_census_test": [80, 82], "9769": 80, "manual": [80, 84, 85, 110, 115, 124, 127, 139, 142, 151, 157, 165, 182], "model_nam": [80, 81, 151], "__class__": [80, 81], "__name__": [80, 81], "804": 80, "underli": [80, 81, 84, 94, 109, 115, 142], "wrongli": [80, 85], "held": [80, 91, 113, 117, 122, 152], "642352": 81, "1087": 81, "077721": 81, "665311": 81, "431247": 81, "725748": 81, "7522": 81, "692939": 81, "407": 81, "110175": 81, "423952": 81, "99999": 81, "4356": 81, "99": 81, "span": [81, 133], "assumpt": [81, 84, 88, 100, 126, 161, 162], "address": 81, "pair": [81, 103, 105, 106, 107, 128, 130, 134, 136, 153], "solver": [81, 133, 147], "descent": [81, 105, 147], "scaler": [81, 90, 151, 156, 179, 181, 185], "standardscalerifittedstandardscal": 81, "wherea": [81, 117, 131, 135, 137, 147, 149, 156], "fashion": [81, 163], "mean_": 81, "64235211": 81, "07772106": 81, "6653108": 81, "43124676": 81, "scale_": 81, "72556083": 81, "59025606": 81, "10461772": 81, "42378265": 81, "data_train_sc": 81, "17177061": 81, "14450843": 81, "71188483": 81, "28845333": 81, "02605707": 81, "22025127": 81, "27618374": 81, "33822677": 81, "77019645": 81, "77536738": 81, "03471139": 81, "53605445": 81, "48319243": 81, "69090725": 81, "perspect": [81, 87, 89, 102], "predefin": 81, "shorthand": 81, "preserv": [81, 99, 150], "set_output": [81, 84, 129, 133, 135], "behaviour": [81, 100, 117, 126], "663100e": 81, "273364e": 81, "530310e": 81, "840667e": 81, "844684e": 81, "000014e": 81, "576792e": 81, "445084e": 81, "202513e": 81, "173852e": 81, "753674e": 81, "471139e": 81, "196565e": 81, "817680e": 81, "677425e": 81, "741752e": 81, "314865e": 81, "047970e": 81, "714245e": 81, "jointplot": 81, "clearer": 81, "num_points_to_plot": 81, "marginal_kw": 81, "dict": [81, 103, 132, 151], "suptitl": [81, 132, 142, 145, 163], "nbefor": 81, "nafter": 81, "x27": [81, 85, 90, 96, 109, 132, 133, 136, 139, 150, 152, 154], "pipelineinot": [81, 85, 90, 132, 136, 139, 150, 152, 154], "fittedpipelin": [81, 85, 90, 132, 136, 139, 150, 152, 154], "standardscalerstandardscal": [81, 85, 90, 132, 136], "logisticregressionlogisticregress": [81, 85, 90, 132, 136], "named_step": 81, "decision_funct": 81, "elapsed_tim": [81, 87, 89], "predicted_target": 81, "n_iter_": [81, 123], "055": 81, "120": 81, "scenario": [81, 85, 133, 141, 145], "kneighborsclassifierifittedkneighborsclassifi": 82, "first_data_valu": 82, "first_predict": 82, "first_target_valu": 82, "number_of_correct_predict": 82, "number_of_predict": 82, "len": [82, 101, 103, 108, 109, 116, 117, 132, 136, 163], "8290379545978042": 82, "8177909714402702": 82, "data_numeric_train": 83, "data_numeric_test": 83, "class_to_predict": 83, "high_revenue_clf": 83, "234": 83, "low_revenue_clf": 83, "766": 83, "7607182343065395": 83, "appear": [83, 110], "most_freq_revenue_clf": 83, "frequent": [83, 88, 93, 98, 104, 154, 174], "reassur": [83, 122, 
152], "sequenc": [84, 115, 148, 155], "arithmet": 84, "instruct": 84, "taken": [84, 108, 119, 140], "symbol": [84, 100], "sort_index": 84, "857": [84, 155], "cambodia": 84, "canada": 84, "182": 84, "china": 84, "122": [84, 101, 107, 108], "columbia": 84, "85": [84, 90, 101, 107, 108, 154, 179], "cuba": 84, "138": [84, 146], "dominican": 84, "republ": 84, "103": [84, 105, 147, 157], "ecuador": 84, "el": 84, "salvador": 84, "155": [84, 154], "england": 84, "127": 84, "franc": 84, "germani": 84, "206": [84, 162], "greec": 84, "guatemala": 84, "88": [84, 101, 107, 108, 117, 150, 154], "haiti": 84, "holand": 84, "netherland": 84, "hondura": 84, "hong": 84, "hungari": 84, "india": 84, "iran": 84, "59": [84, 104, 105, 120], "ireland": 84, "itali": 84, "105": [84, 105], "jamaica": 84, "106": [84, 105], "japan": 84, "lao": 84, "mexico": [84, 136], "951": 84, "nicaragua": 84, "outli": 84, "guam": 84, "usvi": 84, "peru": 84, "philippin": 84, "295": 84, "poland": 84, "87": [84, 117, 143, 146, 150], "portug": 84, "67": [84, 104, 105, 108], "puerto": 84, "rico": 84, "184": 84, "scotland": 84, "south": [84, 108], "115": [84, 101], "taiwan": 84, "thailand": 84, "trinadad": 84, "tobago": 84, "43832": 84, "vietnam": 84, "86": [84, 90, 101, 107, 108, 153, 154], "yugoslavia": 84, "recogn": [84, 94], "categorical_columns_selector": [84, 85, 86, 87, 88, 89, 150, 152, 154], "dtype_includ": [84, 85, 86, 87, 88, 89, 119, 136, 148, 150, 152, 154, 155], "unwant": [84, 107], "data_categor": [84, 86, 88], "education_column": 84, "education_encod": 84, "map": [84, 88, 103, 131, 132, 133, 136, 137, 141, 156], "categories_": 84, "data_encod": 84, "downstream": [84, 135], "lexicograph": 84, "meaningless": [84, 110, 162], "l": [84, 94], "xl": 84, "alphabet": 84, "constructor": 84, "explicitli": [84, 143, 146, 154, 156], "mislead": [84, 89, 108], "altern": [84, 129, 135, 139, 141, 154, 161], "sparse_output": [84, 87, 89], "education_": [84, 136], "spars": [84, 87, 89, 101, 107], "effici": [84, 109, 116, 119, 133, 139], "won": [84, 108], "becam": 84, "workclass_": [84, 136], "feder": [84, 136], "emp": [84, 136], "inc": [84, 136], "country_": [84, 136], "amp": 84, "102": [84, 105], "violat": [84, 100], "realli": [84, 94, 100, 102, 105, 109, 137, 142], "misord": 84, "misus": 84, "ineffici": 84, "integr": [84, 116, 133], "abl": [84, 85, 97, 103, 105, 110, 111, 115, 116, 119, 120, 130, 136, 142, 143, 146, 153, 157, 160, 162, 164, 171, 182, 185], "bypass": 84, "keyword": 84, "min_frequ": [84, 130, 136], "collaps": 84, "rarest": 84, "enabl": [84, 185], "infrequent_if_exist": 84, "sandbox": [84, 179], "use_encoded_valu": [84, 85, 86, 87, 88, 89, 119, 148, 150, 152, 154, 155], "unknown_valu": [84, 85, 86, 87, 88, 89, 119, 148, 150, 152, 154, 155], "silenc": 84, "convergencewarn": 84, "18205142": 84, "1719234": 84, "17490363": 84, "18012547": 84, "16994405": 84, "02308416": 84, "02262449": 84, "023211": 84, "02415061": 84, "02236366": 84, "83232675": 84, "83570478": 84, "82831695": 84, "83292383": 84, "83497133": 84, "833": [84, 88, 155], "decoupl": [85, 142], "numerical_columns_selector": [85, 87, 89], "dtype_exclud": [85, 87, 89, 136], "properli": [85, 96, 105, 113, 122, 161], "format": [85, 100, 105, 134, 140], "elaps": [85, 125], "introspect": [85, 185], "send": 85, "columntransfom": 85, "categorical_preprocessor": [85, 87, 89, 148, 150, 152, 154, 155], "numerical_preprocessor": 85, "associ": [85, 94, 105, 107, 133, 142, 145, 150], "standard_scal": 85, "concaten": [85, 93, 94, 98, 99, 132, 139, 152, 161], 
"columntransformercolumntransform": [85, 90, 136, 150, 152, 154], "onehotencoderonehotencod": [85, 90, 136], "prefer": 85, "raw": [85, 133, 137, 145, 185], "7762": 85, "56": [85, 104, 105, 120, 146], "divorc": [85, 136], "unmarri": [85, 136], "23881": 85, "transport": [85, 136], "30507": 85, "specialti": [85, 136], "14344": 85, "28911": 85, "19484": 85, "wife": [85, 136], "8575055278028008": 85, "usabl": 85, "25311279": 85, "25740528": 85, "22811413": 85, "24582815": 85, "26973963": 85, "02794933": 85, "02916598": 85, "02738404": 85, "02908301": 85, "02688217": 85, "85116184": 85, "84993346": 85, "8482801": 85, "85257985": 85, "85544636": 85, "851": [85, 119], "002": [85, 88, 89, 151], "compound": 85, "isol": [85, 101, 117], "nice": [85, 109, 139], "fast": [85, 89, 115], "passthrough": [85, 87, 89, 119, 148, 150, 152, 154, 155], "670": 85, "690": 85, "8792891655065105": 85, "significantli": [85, 102, 108], "whenev": [85, 105], "popular": [85, 103, 119], "datasci": 85, "practition": 85, "outperform": 85, "assembl": [86, 88, 115, 117], "rais": [86, 88, 109, 110, 119, 121, 143, 146], "warn": [86, 88, 109, 110, 116, 121, 133, 143, 146], "nan": [86, 88, 90, 99, 104, 143, 146], "traceback": [86, 88, 143, 146], "error_scor": [86, 88], "awai": [86, 88, 89, 110, 137, 145, 165], "handi": [86, 88, 101, 105, 143, 146], "empir": [87, 89, 101], "util": [87, 89, 90, 98, 104, 143, 146], "873": [87, 89], "214": 87, "detriment": [87, 89, 117, 119, 133], "dens": [87, 89], "workaround": [87, 89], "755": 88, "rel": [88, 93, 98, 101, 118, 126, 136, 139, 142, 145], "anyth": [88, 100, 124, 127, 142], "constantli": [88, 93, 98], "761": 88, "messag": [88, 89], "289": 89, "307": 89, "signific": [89, 108, 117, 125, 132, 133, 152], "useless": [89, 122], "988": 89, "view": [89, 152], "longer": [89, 133, 137, 140, 151, 158, 163], "current": [89, 122, 177], "incomplet": 89, "unnecessari": [89, 113, 122], "unless": 89, "reproduc": [90, 105, 152], "script": 90, "event": 90, "rerecord": 90, "ui": 90, "releas": 90, "house_pric": [90, 104, 144, 145, 147], "na_valu": [90, 104], "id": [90, 94, 104], "mssubclass": [90, 104], "mszone": [90, 104], "street": [90, 104], "allei": [90, 104], "lotshap": [90, 104], "landcontour": [90, 104], "poolqc": [90, 104], "fenc": [90, 104], "miscfeatur": [90, 104], "mosold": [90, 104], "yrsold": [90, 104, 133], "saletyp": [90, 104], "salecondit": [90, 104], "rl": [90, 104], "8450": [90, 104], "pave": [90, 104], "reg": [90, 104, 108], "lvl": [90, 104], "allpub": [90, 104], "2008": [90, 104], "wd": [90, 104], "9600": [90, 104], "2007": [90, 104], "11250": [90, 104], "ir1": [90, 104], "9550": [90, 104], "2006": [90, 104], "abnorml": [90, 104], "84": [90, 104, 135], "14260": [90, 104], "1455": 90, "1456": 90, "7917": 90, "1457": 90, "13175": 90, "mnprv": [90, 104], "2010": 90, "1458": 90, "9042": 90, "gdprv": 90, "shed": [90, 104], "2500": 90, "1459": [90, 104], "9717": 90, "1460": [90, 104], "9937": 90, "cherri": 90, "retain": [90, 130, 133, 136], "numeric_featur": 90, "fullbath": [90, 104], "halfbath": [90, 104], "neighborhood": [90, 91, 104, 108], "housestyl": [90, 104], "imput": [90, 104], "simpleimput": [90, 104], "numeric_transform": 90, "categorical_transform": 90, "join": 90, "simpleimputersimpleimput": 90, "859": [90, 155], "018": 90, "dollar": [90, 91, 101, 107, 133], "necessarili": [90, 101, 102, 118, 132, 133, 147, 150, 182], "richer": [90, 139], "level": [90, 96, 98, 117, 119, 124, 125, 127, 131, 132, 136, 137, 142, 159, 160, 161, 163, 164, 177], "coars": 90, "dummyregressor": [91, 145], 
"overview": [91, 92, 95, 97, 101, 102, 107, 109, 111, 112, 113, 116, 118, 119, 120, 121, 122, 128, 129, 131, 133, 134, 135, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 157, 159, 160, 161, 162, 163, 164, 165], "fetch_california_h": [91, 95, 101, 102, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 149, 156], "return_x_i": [91, 96, 99, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 149, 156, 179], "as_fram": [91, 95, 99, 101, 102, 107, 108, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 149, 156], "rescal": [91, 95, 102, 111, 113, 114, 115, 116, 117, 118, 120, 122, 123, 133, 145, 149, 151, 156], "splitter": 91, "cv_results_tree_regressor": 91, "n_job": [91, 94, 95, 96, 97, 98, 100, 102, 107, 108, 115, 116, 117, 118, 119, 120, 122, 123, 125, 127, 133, 135, 136, 150, 152, 154, 156], "errors_tree_regressor": 91, "832692": 91, "217875": 91, "260987": 91, "823162": 91, "836318": 91, "695681": 91, "329163": 91, "result_dummi": 91, "errors_dummy_regressor": 91, "91": [91, 104], "140009": 91, "821140": 91, "757566": 91, "543652": 91, "034555": 91, "979007": 91, "477244": 91, "all_error": 91, "concat": [91, 98, 99, 108, 123, 125, 126, 137], "47": [91, 101, 104, 120], "076108": 91, "713153": 91, "897358": 91, "539353": 91, "313195": 91, "941912": 91, "092870": 91, "213912": 91, "015862": 91, "419226": 91, "542490": 91, "330733": 91, "961501": 91, "390784": 91, "947952": 91, "777050": 91, "991373": 91, "625482": 91, "023571": 91, "719080": 91, "556965": 91, "306048": 91, "539567": 91, "452684": 91, "185225": 91, "592580": 91, "298971": 91, "553189": 91, "084639": 91, "281297": 91, "984471": 91, "123369": 91, "981744": 91, "710941": 91, "547140": 91, "058690": 91, "820219": 91, "768721": 91, "911982": 91, "305556": 91, "490034": 91, "503017": 91, "542629": 91, "147974": 91, "369182": 91, "386320": 91, "760654": 91, "815660": 91, "200337": 91, "216574": 91, "189938": 91, "107460": 91, "740784": 91, "620318": 91, "502880": 91, "165331": 91, "linspac": [91, 95, 97, 98, 110, 115, 121, 128, 134, 138, 140, 153, 163], "edgecolor": [91, 94, 98, 101, 102, 104, 105, 106, 107, 132, 153, 163], "legend": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 141, 142, 153, 157, 161, 162, 163, 164], "bbox_to_anchor": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 142, 153, 157, 161, 163], "loc": [91, 94, 98, 99, 100, 105, 107, 109, 110, 115, 121, 131, 134, 136, 137, 141, 142, 153, 157, 161, 163], "upper": [91, 94, 98, 99, 100, 105, 107, 108, 110, 115, 121, 131, 136, 137, 142, 157, 161, 163], "xlabel": [91, 94, 96, 97, 98, 99, 101, 102, 104, 105, 106, 108, 122, 123, 125, 126, 132, 133, 135, 136, 141, 142], "Such": [91, 133], "extrem": [91, 96, 106, 107, 124, 127, 132, 133], "gamma": [92, 96, 97, 131, 132, 137], "svm": [92, 96, 97, 139], "form": [92, 94, 96, 97, 108, 128, 129, 134, 135, 138, 140, 145, 151], "accomplish": [92, 97], "rbf": [92, 97, 131, 132, 137], "svc": [92, 96, 97], "scheme": [92, 97, 100, 109, 162], "validationcurvedisplai": [92, 97, 102, 122, 129, 135], "10e": [92, 97], "10e2": [92, 97], "logarithm": [92, 97], "svc__gamma": [92, 97], "retriev": [92, 97, 101, 108, 133], "learningcurvedisplai": [92, 95, 97], "half": [93, 98, 142], "uniform": [93, 98, 107, 110, 132, 154], "handwritten": 94, "digit": 94, "load_digit": 94, "recreat": 94, "minmaxscal": [94, 110, 133, 185], "kfold": [94, 96, 99, 123, 144, 147, 152], "test_score_no_shuffl": 94, "931": 94, "027": 94, "test_score_with_shuffl": 94, "964": 94, "006": [94, 119, 123], 
"all_scor": [94, 96], "xlim": [94, 108, 115, 142], "impos": [94, 117, 154], "94166667": 94, "89722222": 94, "94707521": 94, "96657382": 94, "90250696": 94, "ship": 94, "descr": [94, 101, 107], "_digits_dataset": 94, "optic": 94, "recognit": 94, "characterist": [94, 101, 107, 142], "1797": 94, "8x8": 94, "pixel": 94, "creator": 94, "alpaydin": 94, "boun": 94, "edu": 94, "tr": 94, "juli": 94, "1998": 94, "copi": [94, 101, 105, 108, 128, 130, 134, 136, 179], "uci": 94, "ic": 94, "nist": 94, "bitmap": 94, "preprint": 94, "32x32": 94, "nonoverlap": 94, "block": [94, 100, 101, 107, 108, 116], "4x4": 94, "invari": [94, 132], "distort": 94, "garri": 94, "j": 94, "candela": 94, "dimmick": 94, "geist": 94, "grother": 94, "janet": 94, "wilson": 94, "handprint": 94, "nistir": 94, "5469": 94, "kaynak": 94, "1995": 94, "Their": [94, 141], "msc": 94, "thesi": 94, "institut": 94, "graduat": 94, "bogazici": 94, "univers": 94, "cascad": 94, "kybernetika": 94, "ken": 94, "tang": 94, "ponnuthurai": 94, "n": [94, 96, 97, 107, 110, 116, 120, 123, 128, 131, 133, 134, 137, 141, 142, 151, 152, 155, 157], "suganthan": 94, "xi": 94, "yao": 94, "kai": 94, "qin": 94, "dimensionalityreduct": 94, "lda": 94, "electr": [94, 104], "electron": 94, "nanyang": 94, "2005": 94, "claudio": 94, "gentil": 94, "nip": 94, "2000": 94, "writer": 94, "wrote": 94, "certain": [94, 103, 123], "130": [94, 116], "hypothesi": [94, 100, 133], "itertool": [94, 104], "bound": [94, 142, 145], "writer_boundari": 94, "256": [94, 116, 117, 154], "386": 94, "516": 94, "646": 94, "776": 94, "915": 94, "1029": 94, "1157": 94, "1287": 94, "1415": 94, "1545": 94, "1667": 94, "zeros_lik": [94, 109], "lower_bound": 94, "upper_bound": 94, "group_id": 94, "lb": 94, "zip": [94, 105, 115, 128, 132, 134], "ytick": [94, 99], "xtick": 94, "ylabel": [94, 99, 100, 102, 122, 123, 132, 133, 135, 136, 141, 142, 157], "groupkfold": 94, "921": 94, "021": 94, "realiti": 94, "synthet": [95, 110, 115, 125, 130, 132, 136, 142, 160, 161, 162, 164], "train_siz": [95, 97, 148, 155], "endpoint": 95, "325": [95, 105], "775": 95, "displai": [95, 103, 112, 121, 141, 142, 153, 179], "from_estim": [95, 97, 102, 109, 122, 131, 132, 135, 137, 141, 142, 157, 161, 163], "score_typ": [95, 97], "negate_scor": [95, 102, 122, 135], "neg_": [95, 101, 129, 135, 147], "score_nam": [95, 97], "std_display_styl": [95, 97, 102, 122, 135], "errorbar": [95, 97, 102, 122, 133, 135], "ax_": [95, 97, 102, 122, 131, 135, 137, 142], "xscale": [95, 133], "log": [95, 133, 145, 153, 154], "alon": [95, 132], "anymor": [95, 98, 100, 101, 117], "bay": 95, "especi": [95, 133, 136], "report": [95, 96, 101], "problemat": [96, 133, 154], "underestim": 96, "philosoph": 96, "breast": 96, "cancer": 96, "load_breast_canc": 96, "param_grid": [96, 118, 120, 150, 152, 161, 179, 185], "model_to_tun": 96, "gridsearchcvifittedgridsearchcv": [96, 150, 152], "svcsvc": 96, "best_params_": [96, 123, 149, 150, 152, 154, 156, 161, 179, 185], "best_score_": 96, "627": 96, "stage": [96, 99, 116, 124, 125, 127, 139, 142, 163], "misinterpret": 96, "forget": 96, "pitfal": [96, 136], "emb": [96, 152], "dedic": [96, 145], "declar": 96, "inner_cv": 96, "outer_cv": 96, "014": 96, "trial": 96, "test_score_not_nest": 96, "test_score_nest": 96, "n_trial": 96, "non_nest": 96, "append": [96, 99, 108, 110, 121, 123, 137, 147], "merg": [96, 125], "whisker": [96, 107, 123, 125, 126, 133, 136, 146], "vert": [96, 107, 123, 125, 126, 133, 136, 146], "highest": [96, 109, 124, 125, 127, 142, 145, 153, 154], "lure": 96, "overli": [96, 101], "012239": 
97, "002296": 97, "680000": 97, "011681": 97, "002262": 97, "746667": 97, "011859": 97, "002250": 97, "786667": 97, "011066": 97, "002401": 97, "800000": 97, "011485": 97, "002170": 97, "011416": 97, "002154": 97, "011133": 97, "002183": 97, "009980": 97, "002140": 97, "826667": 97, "010160": 97, "002126": 97, "010295": 97, "002155": 97, "733333": 97, "765": 97, "043": 97, "param_nam": [97, 102, 122, 135, 150, 153, 154, 179, 185], "disp": [97, 102, 122, 131, 135, 137, 142], "errorbar_kw": 97, "transpar": 97, "regim": 97, "oscil": 97, "donat": [97, 106, 142, 143, 146], "simplist": 97, "imposs": 97, "cv_results_logistic_regress": 98, "test_score_logistic_regress": 98, "815937": 98, "813849": 98, "815036": 98, "815569": 98, "810982": 98, "814831": 98, "813112": 98, "810368": 98, "812375": 98, "816306": 98, "most_frequent_classifi": 98, "cv_results_most_frequ": 98, "test_score_most_frequ": 98, "760329": 98, "756808": 98, "759142": 98, "760739": 98, "761681": 98, "761885": 98, "757463": 98, "757176": 98, "763114": 98, "all_test_scor": 98, "stratified_dummi": 98, "cv_results_stratifi": 98, "test_score_dummy_stratifi": 98, "uniform_dummi": 98, "cv_results_uniform": 98, "test_score_dummy_uniform": 98, "wrong": [98, 103, 124, 127, 153], "henc": [98, 108, 115, 133, 154], "uniformli": [98, 110, 137], "weakest": 98, "argu": 98, "permutation_test_scor": 98, "permut": [98, 179], "quit": [98, 99, 100, 102, 103, 105, 116, 141], "strongest": 98, "load_iri": [99, 179], "toi": [99, 139], "nine": 99, "data_random": 99, "randn": [99, 110, 115, 124, 127, 139], "train_index": 99, "test_index": 99, "six": 99, "train_cv_count": 99, "test_cv_count": 99, "fold_idx": 99, "train_idx": 99, "test_idx": 99, "enumer": [99, 109, 110, 121, 123, 125, 128, 134, 152], "idx": [99, 125, 163], "953": 99, "009": [99, 116], "frequenc": [99, 106, 142], "stratifiedkfold": [99, 143, 146], "967": 99, "past": [100, 106, 128, 130, 134, 136, 142], "ident": [100, 101, 116, 142, 152], "financi": 100, "quotat": 100, "tot": 100, "xom": 100, "exxon": 100, "cvx": 100, "chevron": 100, "cop": 100, "conocophillip": 100, "vlo": 100, "valero": 100, "template_nam": 100, "quot": 100, "stock": 100, "2f": [100, 101, 117, 120, 122, 128, 131, 133, 134, 137, 138, 139, 140, 142, 150, 154, 157, 163], "surprisingli": [100, 101, 107, 127], "outstand": 100, "eas": [100, 101, 103, 132, 139], "r2_score": 100, "verifi": [100, 113, 122, 139], "doesn": 100, "proper": [100, 107, 139, 152], "to_period": 100, "q": 100, "94": [100, 109], "forecast": 100, "ulterior": 100, "timeseriessplit": 100, "nuniqu": [100, 105, 161, 185], "118": 100, "511": 100, "shelv": 100, "absurd": 100, "intend": [101, 106, 177], "dive": 101, "area": [101, 102, 104, 110, 137, 142], "geograph": [101, 107, 118], "_california_housing_dataset": [101, 107], "20640": [101, 107], "medinc": [101, 107, 108], "houseag": [101, 107, 108], "averoom": [101, 107, 108, 156], "household": [101, 107], "avebedrm": [101, 107, 108], "aveoccup": [101, 107, 108], "member": [101, 107], "latitud": [101, 107, 108], "longitud": [101, 107, 108], "statlib": [101, 107], "dcc": [101, 107], "fc": [101, 107], "pt": [101, 107], "ltorgo": [101, 107], "cal_hous": [101, 107], "district": [101, 107, 108, 118], "hundr": [101, 107, 125], "deriv": [101, 105, 107, 129, 135, 139], "1990": [101, 107], "u": [101, 107], "smallest": [101, 107, 116, 133], "bureau": [101, 107], "600": [101, 107], "resid": [101, 107], "home": [101, 107], "empti": [101, 107], "vacat": [101, 107], "resort": [101, 107], "kellei": [101, 107], "ronald": [101, 107], 
"barri": [101, 107], "spatial": [101, 103, 107], "autoregress": [101, 107], "1997": [101, 107], "291": [101, 107], "297": [101, 107], "3252": [101, 107, 108], "984127": [101, 107, 108], "023810": [101, 107, 108], "322": [101, 107, 108, 116], "555556": [101, 107, 108], "3014": [101, 107, 108], "238137": [101, 107, 108], "971880": [101, 107, 108], "2401": [101, 107, 108], "109842": [101, 107, 108], "2574": [101, 107, 108], "288136": [101, 107, 108], "073446": [101, 107, 108], "496": [101, 107, 108, 153, 154], "802260": [101, 107, 108], "6431": [101, 107, 108], "817352": [101, 107, 108], "073059": [101, 107, 108], "558": [101, 107, 108], "547945": [101, 107, 108], "8462": [101, 107, 108], "281853": [101, 107, 108], "081081": [101, 107, 108], "565": [101, 107, 108], "181467": [101, 107, 108], "452": 101, "358": 101, "352": 101, "341": 101, "342": 101, "medhousev": [101, 107, 108], "decisiontreeregressorifitteddecisiontreeregressor": [101, 139, 164], "mean_absolute_error": [101, 113, 120, 121, 122, 138, 145], "grown": [101, 113, 117, 122], "leaf": [101, 117, 150, 154, 155, 157, 161, 163, 173, 174], "node": [101, 117, 119, 150, 155, 157, 161, 163, 172, 173, 174], "phenomena": 101, "unstabl": [101, 133], "wouldn": 101, "unlimit": [101, 117], "lucki": 101, "easiest": 101, "variant": 101, "139716": 101, "002720": 101, "909797": 101, "140345": 101, "002705": 101, "421170": 101, "138842": 101, "002797": 101, "411089": 101, "140532": 101, "002750": 101, "319824": 101, "137840": 101, "002907": 101, "607875": 101, "front": 101, "revert": [101, 129, 135], "negat": 101, "test_error": [101, 133], "139676": 101, "003097": 101, "901300": 101, "140465": 101, "003156": 101, "572767": 101, "140544": 101, "003002": 101, "194585": 101, "141134": 101, "002926": 101, "590236": 101, "141419": 101, "002745": 101, "727998": 101, "percentag": [101, 110, 145], "tag": [101, 128, 134], "expert": [101, 139], "16606712": 101, "16141486": 101, "16183972": 101, "16127944": 101, "15610027": 101, "00233054": 101, "00216913": 101, "00233126": 101, "00212193": 101, "00216627": 101, "26291527": 101, "41947109": 101, "44492564": 101, "23357874": 101, "40788361": 101, "extens": 101, "overal": [101, 113, 117, 118, 122, 133, 137, 142, 161], "fluctuat": [102, 139, 150], "hopefulli": [102, 117, 138], "260": 102, "305": 102, "harm": 102, "matter": [102, 126, 152], "compromis": [102, 142], "dispers": [102, 116], "usa": 103, "usd": 103, "outdat": 103, "fnlwgt": 103, "respond": 103, "columns_to_plot": 103, "pairwis": 103, "space": [103, 110, 112, 114, 115, 121, 123, 127, 129, 132, 133, 135, 136, 137, 141, 157, 159, 162, 163], "plotli": [103, 153, 156, 179], "parcoord": 103, "graph_object": 103, "labelencod": 103, "le": 103, "def": [103, 108, 110, 115, 128, 131, 132, 134, 137, 139, 140, 150, 153, 154, 161, 179], "generate_dict": 103, "col": [103, 116], "tickval": 103, "classes_": [103, 141, 142, 157, 163, 181], "ticktext": 103, "els": [103, 110, 139, 161], "plot_list": 103, "fig": [103, 104, 108, 132, 133, 142, 145, 153, 156, 157, 158, 179], "colorscal": 103, "viridi": [103, 105, 107, 153, 163, 179], "coordin": [103, 141, 152, 153, 156, 179], "hold": [103, 145, 153, 156, 179, 185], "undo": [103, 153], "4000": [103, 106], "spearman": 103, "rank": [103, 117, 123, 185], "pearson": 103, "versatil": 103, "meant": 103, "nomin": 103, "hierarchi": 103, "squareform": 103, "stat": [103, 117, 120, 154], "spearmanr": 103, "ax1": 103, "ax2": 103, "subplot": [103, 104, 108, 132, 133, 136, 142, 145, 157, 161, 162, 163], "corr": 103, "symmetr": [103, 117, 
133, 145, 161], "fill_diagon": 103, "hierarch": 103, "ward": 103, "linkag": 103, "distance_matrix": 103, "ab": [103, 134, 136, 139], "dist_linkag": 103, "dendro": 103, "dendrogram": 103, "to_list": 103, "leaf_rot": 103, "dendro_idx": 103, "arang": [103, 105, 107, 108, 110, 136, 161, 162, 164], "ivl": 103, "imshow": [103, 163], "cmap": [103, 109, 131, 132, 137, 141, 150, 157, 161, 163], "coolwarm": 103, "set_xtick": 103, "set_ytick": [103, 108], "set_xticklabel": 103, "vertic": [103, 141], "set_yticklabel": [103, 108], "tight_layout": 103, "diverg": [103, 131, 137, 141, 156, 163], "colormap": [103, 131, 132, 137, 141, 163], "softer": [103, 141], "anti": 103, "dark": [103, 137], "directori": [104, 105, 106], "charact": 104, "marker": [104, 109, 142, 163], "pars": [104, 105], "lotconfig": 104, "208500": 104, "fr2": 104, "181500": 104, "223500": 104, "corner": [104, 142], "140000": 104, "250000": 104, "nin": 104, "tail": [104, 105, 107, 141], "coupl": [104, 105, 107, 116, 117, 133, 154], "core": [104, 105, 106, 107, 115, 116], "rangeindex": [104, 105, 106, 107], "null": [104, 105, 106, 107, 141], "1201": 104, "landslop": 104, "condition1": 104, "condition2": 104, "bldgtype": 104, "yearremodadd": 104, "roofstyl": 104, "roofmatl": 104, "exterior1st": 104, "exterior2nd": 104, "masvnrtyp": 104, "588": 104, "1452": 104, "exterqu": 104, "extercond": 104, "foundat": 104, "bsmtqual": 104, "1423": 104, "bsmtcond": 104, "bsmtexposur": 104, "1422": 104, "bsmtfintype1": 104, "bsmtfintype2": 104, "heat": 104, "heatingqc": 104, "centralair": 104, "bsmtfullbath": 104, "bsmthalfbath": 104, "kitchenqu": 104, "fireplacequ": 104, "770": 104, "garagetyp": 104, "1379": 104, "garageyrblt": 104, "garagefinish": 104, "garagequ": 104, "garagecond": 104, "paveddr": 104, "69": [104, 109, 150, 154], "281": 104, "901": 104, "kb": [104, 106], "numerical_data": 104, "410": 104, "layout": 104, "subplots_adjust": [104, 105, 107, 108], "hspace": [104, 105, 107], "wspace": [104, 107], "criterion": [104, 108, 157], "swim": 104, "pool": [104, 127], "string_data": 104, "490": [104, 127], "ceil": 104, "zip_longest": 104, "n_string_featur": 104, "nrow": [104, 142, 163], "ncol": [104, 132, 134, 142, 145, 163], "ravel": [104, 105, 134, 141, 163], "barh": [104, 106, 108, 137, 141, 142], "set_titl": [104, 139, 140, 142, 163], "databas": [104, 156], "grvl": 104, "gd": 104, "make_column_transform": [104, 119, 136], "most_frequent_imput": 104, "mean_imput": 104, "ames_housing_preprocess": 104, "tolist": [104, 130, 136, 157, 163], "timestamp": 105, "150": [105, 109], "0880": 105, "033870": 105, "161": [105, 116, 157], "336": 105, "0842": 105, "033571": 105, "163": 105, "409": 105, "0234": 105, "033223": 105, "156": 105, "445": 105, "0016": 105, "032908": 105, "148": 105, "441": 105, "1144": 105, "38254": 105, "38253": 105, "mb": [105, 107], "str": 105, "datetim": 105, "direct": [105, 132, 137, 154], "reopen": 105, "09": [105, 133], "explan": [105, 158], "soup": 105, "blender": 105, "blend": [105, 132], "veget": 105, "instantan": 105, "profession": 105, "calibr": 105, "track": 105, "spent": [105, 125], "food": 105, "uranium": 105, "petrol": 105, "ga": 105, "coal": 105, "plant": 105, "400": 105, "cheaper": [105, 108], "w": [105, 145, 163], "deliv": 105, "breakout": 105, "kilomet": 105, "costli": [105, 137, 149, 150, 156], "cruis": 105, "datetime64": 105, "ns": 105, "freq": 105, "august": 105, "septemb": 105, "date_first_rid": 105, "cycling_rid": 105, "data_rid": 105, "target_rid": 105, "tempor": 105, "resolut": [105, 154], "smoother": [105, 
110], "tmp": [105, 142], "ipykernel_3158": 105, "3967367282": 105, "py": [105, 116, 133, 142, 143, 146], "futurewarn": [105, 142], "deprec": [105, 142], "set_xlabel": [105, 145, 163], "extremum": 105, "rng": [105, 107, 108, 110, 115, 127, 139], "randomst": [105, 107, 108, 110, 115, 127, 132, 139], "quantiz": [105, 107], "midpoint": [105, 107], "interv": [105, 107, 110, 112, 115, 121, 160, 162, 164], "qcut": [105, 107], "retbin": [105, 107], "lambda": [105, 107, 153, 179], "mid": [105, 107], "palett": [105, 107, 109, 131, 137, 141, 157, 161, 163], "uphil": 105, "physiolog": 105, "stimuli": 105, "recenc": [106, 142], "monetari": [106, 142], "12500": 106, "98": [106, 108, 157], "3250": [106, 140], "6000": 106, "month": [106, 142], "cm\u00b3": [106, 142], "748": 106, "747": 106, "noth": [106, 110], "shock": 106, "her": 106, "762032": 106, "237968": 106, "strike": 106, "fetch": 107, "internet": 107, "california_h": 107, "526": 107, "585": 107, "521": [107, 142], "413": [107, 154], "422": [107, 142], "demographi": 107, "granular": [107, 142], "20639": 107, "640": [107, 157], "unnotic": 107, "features_of_interest": [107, 133], "429000": 107, "096675": 107, "070655": 107, "1425": 107, "476744": 107, "474173": 107, "473911": 107, "386050": 107, "1132": 107, "462122": 107, "846154": 107, "333333": 107, "692308": 107, "440716": 107, "006079": 107, "429741": 107, "787": [107, 151], "229129": 107, "048780": 107, "818116": 107, "1166": 107, "052381": 107, "099526": 107, "282261": 107, "1725": 107, "141": 107, "909091": 107, "066667": 107, "1243": 107, "35682": 107, "huge": 107, "datapoint": [107, 137], "coast": 107, "big": [107, 145], "citi": [107, 145], "san": 107, "diego": 107, "lo": 107, "angel": 107, "jose": 107, "francisco": 107, "columns_drop": 107, "distinguish": 107, "curiou": [107, 185], "553": [107, 142], "062": 107, "coef": [107, 108, 133, 136, 137, 141], "est": [107, 133], "spot": [107, 133], "10000": 108, "100k": 108, "assert": [108, 117, 185], "un": [108, 133], "bin_var": 108, "randint": [108, 120, 124, 127], "rnd_bin": 108, "num_var": 108, "rnd_num": 108, "x_with_rnd_feat": 108, "x_train": 108, "x_test": 108, "y_train": [108, 174], "y_test": 108, "train_dataset": 108, "insert": [108, 131, 137], "kde": 108, "scatter_kw": 108, "x_i": 108, "versu": [108, 145, 165], "6013465992564662": 108, "5975757977248636": 108, "Its": 108, "somehow": 108, "rest": [108, 139, 163], "worth": 108, "habit": 108, "nb": 108, "outcom": [108, 141, 142, 156], "shall": [108, 110], "rise": 108, "80k": 108, "gaug": 108, "decad": 108, "visibl": [108, 145], "dev": 108, "601315755610292": 108, "5972410717953758": 108, "safe": 108, "perturb": 108, "repeatedkfold": 108, "cv_model": 108, "n_repeat": [108, 125, 126], "boxplot": [108, 130, 136], "cyan": 108, "satur": 108, "pretti": 108, "l1": 108, "015": 108, "5899811014945939": 108, "5769786920519312": 108, "partli": 108, "multivari": 108, "instabl": 108, "teas": 108, "9802456390477668": 108, "8472757276858796": 108, "formal": 108, "brought": 108, "argsort": [108, 125], "9797817485556926": 108, "8468741418387562": 108, "get_score_after_permut": 108, "curr_feat": 108, "x_permut": 108, "col_idx": 108, "permuted_scor": 108, "get_feature_import": 108, "baseline_score_train": 108, "permuted_score_train": 108, "feature_import": 108, "684": 108, "list_feature_import": 108, "n_round": 108, "672": 108, "0104": 108, "heavili": 108, "permutation_import": 108, "calcul": [108, 109, 142], "importances_mean": 108, "importances_std": 108, "plot_feature_import": 108, 
"perm_importance_result": 108, "feat_nam": 108, "xerr": 108, "perm_importance_result_train": 108, "realist": [108, 130, 136, 141], "unclear": 108, "culmen_column": [109, 131, 137, 141, 157, 158, 159, 163], "purposefulli": 109, "unlik": [109, 113, 122, 157], "misclassifi": [109, 132, 137], "decisiontreeclassifi": [109, 119, 143, 146, 157, 161, 163], "tab": [109, 110, 115, 131, 132, 136, 137, 141, 142, 157, 161, 162, 163], "decisiontreeclassifierifitteddecisiontreeclassifi": [109, 157, 163], "misclassified_samples_idx": 109, "flatnonzero": 109, "data_misclassifi": 109, "decisionboundarydisplai": [109, 131, 132, 137, 141, 157, 159, 161, 163], "response_method": [109, 131, 132, 137, 141, 157, 159, 161, 163], "rdbu": [109, 132, 161], "center": [109, 132, 134, 153, 156, 161], "nwith": [109, 115], "misclassif": [109, 137, 142], "sample_weight": 109, "trick": 109, "drastic": 109, "qualit": [109, 110, 128, 134, 163], "newly_misclassified_samples_idx": 109, "remaining_misclassified_samples_idx": 109, "intersect1d": 109, "ensemble_weight": 109, "935672514619883": 109, "6929824561403509": 109, "adaboostclassifi": 109, "samm": 109, "adaboostclassifierifittedadaboostclassifi": 109, "decisiontreeclassifierdecisiontreeclassifi": 109, "boosting_round": 109, "estimators_": [109, 110, 112, 121], "to_numpi": [109, 110, 121, 142], "640x480": 109, "estimator_weights_": 109, "58351894": 109, "46901998": 109, "03303773": 109, "estimator_errors_": 109, "05263158": 109, "05864198": 109, "08787269": 109, "sens": [109, 164], "generate_data": [110, 115], "x_min": [110, 115], "x_max": [110, 115], "capabl": [110, 115, 133, 142, 160, 162, 164], "y_pred": [110, 143, 145, 146], "data_bootstrap": 110, "target_bootstrap": 110, "bootstrap_sampl": 110, "bootstrap_indic": 110, "n_bootstrap": 110, "bootstrap_idx": 110, "facecolor": 110, "180": [110, 140], "linewidth": [110, 131, 132, 137, 161], "darker": [110, 131, 137, 141], "data_train_hug": 110, "data_test_hug": 110, "target_train_hug": 110, "100_000": 110, "data_bootstrap_sampl": 110, "target_bootstrap_sampl": 110, "ratio_unique_sampl": 110, "bag_of_tre": 110, "tree_idx": [110, 121], "tree_predict": [110, 121], "feed": 110, "bag_predict": 110, "unbroken": [110, 115], "whole": [110, 112, 117, 119, 121, 133, 139], "meta": 110, "wrap": [110, 133, 137, 165], "snippet": [110, 130, 136, 179], "smooth": [110, 132, 137], "bagged_tre": [110, 119], "bagged_trees_predict": 110, "opac": 110, "appreci": 110, "polynomialfeatur": [110, 129, 130, 132, 133, 135, 136, 139], "polynomial_regressor": 110, "1e": [110, 116, 131, 137, 151, 154], "intention": 110, "simpli": [110, 163], "regressor_predict": 110, "base_model_lin": 110, "bagging_predict": 110, "ylim": [110, 136, 142], "shade": 110, "randomizedsearchcv": [111, 117, 120, 149, 154, 156, 179], "penguins_regress": [112, 121, 128, 134, 138, 140, 158, 160, 161, 162, 164], "evenli": [112, 121], "170": [112, 121], "230": [112, 121], "newli": [112, 121], "conduct": [113, 122, 156], "learning_r": [113, 114, 117, 122, 123, 148, 150, 153, 154, 155, 179, 181], "slower": [113, 122, 135, 152], "offer": [113, 122, 154], "certainli": [113, 122], "n_iter_no_chang": [113, 122], "max_leaf_nod": [114, 117, 123, 148, 150, 152, 153, 154, 155, 161, 179, 181], "residu": [115, 117, 145, 154], "back": [115, 141, 142, 153, 157], "len_x": 115, "rand": [115, 139], "target_train_predict": 115, "target_test_predict": 115, "line_predict": 115, "lines_residu": 115, "edit": 115, "initi": [115, 152, 182], "tree_residu": 115, "target_train_predicted_residu": 115, 
"target_test_predicted_residu": 115, "manag": 115, "x_sampl": 115, "target_tru": 115, "target_true_residu": 115, "commit": [115, 145], "y_pred_first_tre": 115, "517": 115, "393": 115, "145": 115, "248": [115, 116, 146], "y_pred_first_and_second_tre": 115, "gradientboostingregressor": [115, 116, 122], "gradient_boost": [115, 116], "cv_results_gbdt": [115, 116], "446": 115, "919": 115, "982": 115, "007": [115, 116], "random_forest": [115, 119], "cv_results_rf": 115, "428": 115, "436": 115, "172": 115, "085": 115, "brute": [116, 138], "overcom": [116, 118, 132, 139], "benchmark": 116, "394": 116, "911": 116, "995": 116, "kbinsdiscret": [116, 132, 139], "n_bin": [116, 132, 139], "quantil": [116, 132], "data_tran": 116, "opt": [116, 133, 143, 146], "hostedtoolcach": [116, 133, 143, 146], "x64": [116, 133, 143, 146], "lib": [116, 133, 143, 146], "python3": [116, 133, 143, 146], "site": [116, 133, 143, 146], "_discret": 116, "userwarn": [116, 143, 146], "249": 116, "231": 116, "162": 116, "203": [116, 143, 146], "242": 116, "125": 116, "160": 116, "126": 116, "136": 116, "93": 116, "199": 116, "253": 116, "207": 116, "235": [116, 121, 164], "022": 116, "375": 116, "histogram_gradient_boost": 116, "cv_results_hgbdt": 116, "758": 116, "694": 116, "551": 116, "060": 116, "clariti": 117, "doubl": [117, 150, 151], "max_featur": [117, 119, 120], "grow": [117, 118, 161, 179], "uncorrel": 117, "constraint": [117, 133, 161], "min_samples_leaf": [117, 118, 153, 154, 161, 179], "branch": [117, 161], "promot": [117, 136], "altogeth": 117, "param_distribut": [117, 154, 156], "search_cv": 117, "n_iter": [117, 120, 149, 154, 156, 179], "param_": [117, 120, 123, 150, 154], "mean_test_error": [117, 120], "std_test_error": [117, 120], "cv_results_": [117, 120, 123, 150, 152, 154, 156, 179], "mean_test_scor": [117, 120, 123, 150, 152, 153, 154, 156, 179], "std_test_scor": [117, 120, 150, 152, 153, 154], "sort_valu": [117, 120, 150, 154, 156], "param_max_featur": [117, 120], "param_max_leaf_nod": 117, "param_min_samples_leaf": 117, "978155": 117, "564657": 117, "946351": 117, "544967": 117, "361681": 117, "392600": 117, "056250": 117, "529362": 117, "384198": 117, "653690": 117, "705012": 117, "557795": 117, "814857": 117, "973013": 117, "929450": 117, "916330": 117, "681239": 117, "385049": 117, "024546": 117, "781209": 117, "role": 117, "inter": 117, "refit": [117, 148, 152, 155], "overlook": 117, "loguniform": [117, 154], "param_max_it": 117, "param_learning_r": 117, "01864": 117, "059711": 117, "305289": 117, "047293": 117, "886194": 117, "294858": 117, "176656": 117, "620216": 117, "349642": 117, "297739": 117, "023759": 117, "825347": 117, "083745": 117, "104171": 117, "400591": 117, "215543": 117, "241217": 117, "301977": 117, "067503": 117, "780190": 117, "449252": 117, "05929": 117, "887688": 117, "400111": 117, "160519": 117, "337594": 117, "372942": 117, "125207": 117, "015150": 117, "814681": 117, "054511": 117, "191347": 117, "690748": 117, "248463": 117, "977311": 117, "593183": 117, "906226": 117, "187714": 117, "847621": 117, "061034": 117, "712506": 117, "707332": 117, "079415": 117, "447912": 117, "900105": 117, "0351": 117, "512730": 117, "998659": 117, "019923": 117, "645082": 117, "109315": 117, "039361": 117, "766862": 117, "042788": 117, "019351": 117, "341590": 117, "090469": 117, "01724": 117, "857731": 117, "137648": 117, "hgbt": 117, "hassl": 118, "354": 118, "087": [118, 135], "min_samples_split": [118, 161], "523": [118, 134], "107": 118, "bagging_regressor": 118, "642": 118, "083": 118, 
"decent": [118, 153, 154], "modif": 119, "inject": 119, "decorrel": 119, "categorical_encod": 119, "scores_tre": 119, "820": 119, "scores_bagged_tre": 119, "846": 119, "005": 119, "randomforestclassifi": [119, 125, 126], "scores_random_forest": 119, "004": 119, "disabl": 119, "sqrt": 119, "literatur": 119, "agnost": 119, "param": [120, 123, 132, 150, 153], "bootstrap_featur": 120, "estimator__ccp_alpha": 120, "estimator__criterion": 120, "estimator__max_depth": 120, "estimator__max_featur": 120, "estimator__max_leaf_nod": 120, "estimator__min_impurity_decreas": 120, "estimator__min_samples_leaf": 120, "estimator__min_samples_split": 120, "estimator__min_weight_fraction_leaf": 120, "estimator__monotonic_cst": 120, "estimator__random_st": 120, "estimator__splitt": 120, "max_sampl": 120, "oob_scor": 120, "verbos": [120, 151, 154, 156, 181], "warm_start": 120, "param_n_estim": 120, "param_max_sampl": 120, "param_estimator__max_depth": 120, "395300": 120, "200372": 120, "894554": 120, "132407": 120, "299403": 120, "007797": 120, "852611": 120, "845432": 120, "470246": 120, "165325": 120, "650233": 120, "928890": 120, "302352": 120, "151084": 120, "315767": 120, "849588": 120, "324889": 120, "077862": 120, "356723": 120, "042321": 120, "708955": 120, "161825": 120, "895668": 120, "964202": 120, "318367": 120, "415482": 120, "755615": 120, "456216": 120, "194714": 120, "829366": 120, "364199": 120, "091940": 120, "489622": 120, "884751": 120, "606614": 120, "405458": 120, "209962": 120, "954084": 120, "757686": 120, "885452": 120, "gram": [121, 129, 135, 138], "380": 121, "633": 121, "data_rang": 121, "forest_predict": 121, "n_estimators_": 122, "201": 122, "hist_gbdt": 123, "839": 123, "best_estimator_": 123, "528": 123, "447": 123, "576": 123, "290": 123, "414": 123, "index_column": 123, "inner_cv_result": 123, "cv_idx": 123, "search_cv_result": 123, "set_index": [123, 131, 137, 141, 147], "renam": [123, 150, 153, 154, 156, 179], "coincid": [123, 142], "bioinformat": [124, 127], "rna": [124, 127], "seq": [124, 127], "ten": [124, 127], "anova": [124, 125, 127], "feature_select": [124, 125, 126, 127], "selectkbest": [124, 125, 127], "f_classif": [124, 125, 127], "pre": [124, 127], "princip": 125, "make_classif": [125, 126], "n_inform": [125, 126], "n_redund": [125, 126], "univari": 125, "model_without_select": [125, 126], "model_with_select": [125, 126], "score_func": [125, 127], "cv_results_without_select": [125, 126], "incorpor": 125, "cv_results_with_select": [125, 126], "analyz": [125, 133, 179], "swap": 125, "swaplevel": [125, 126], "Of": 125, "scores_": 125, "percentil": 125, "alien": 125, "primari": 125, "feature_importances_": 126, "suffici": [126, 132], "class_sep": 126, "selectfrommodel": 126, "feature_selector": [126, 127], "overestim": 126, "100000": 127, "data_subset": 127, "940": 127, "succeed": 127, "legit": 127, "leak": 127, "data_train_subset": 127, "520": 127, "460": 127, "boilerpl": 127, "linear_model_flipper_mass": [128, 134, 140], "flipper_length": [128, 134, 140], "weight_flipper_length": [128, 134, 138, 140], "intercept_body_mass": [128, 134, 138, 140], "body_mass": [128, 134, 140], "flipper_length_rang": [128, 134, 138, 140], "goodness_fit_measur": [128, 134], "true_valu": [128, 134], "scalar": [128, 134], "model_idx": [128, 134], "x1": [129, 135, 141], "x2": [129, 135], "x3": [129, 135], "penguins_non_miss": [129, 135, 185], "181": [129, 135, 140], "186": [129, 135, 140], "195": [129, 135, 140], "193": [129, 135, 140, 154], "190": [129, 135, 140], "sign": [129, 135], 
"interaction_onli": [129, 130, 135, 136], "intermedi": [129, 135, 139, 152, 153], "moment": [130, 136, 157], "15024": [130, 136], "reload": [130, 136, 150, 154], "concern": [130, 136, 152], "named_transformers_": [130, 136], "get_feature_names_out": [130, 136], "metion": [131, 137], "infinit": [131, 137], "invers": [131, 137], "yourself": [131, 137], "penguins_train": [131, 137, 141], "penguins_test": [131, 137, 141], "vmin": [131, 132, 137, 150, 157, 163], "vmax": [131, 132, 137, 150, 157, 163], "plot_decision_boundari": [131, 132, 137], "logisticregression__c": [131, 137, 179, 181], "plot_method": [131, 132, 137], "pcolormesh": [131, 132, 137], "rdbu_r": [131, 137, 141], "contour": [131, 132, 137], "candid": [131, 137, 154, 156, 157], "cs": [131, 137], "1e6": [131, 137], "queri": [131, 133, 137], "kernel_approxim": [131, 132, 135, 137, 139], "nevertheless": 132, "moon": 132, "crescent": 132, "make_moon": 132, "newaxi": [132, 161], "data_moon": 132, "target_moon": 132, "gaussian": 132, "edg": 132, "concentr": 132, "make_gaussian_quantil": 132, "n_class": [132, 141, 163], "gauss": 132, "data_gauss": 132, "target_gauss": 132, "xor": 132, "OR": 132, "target_xor": 132, "logical_xor": 132, "int32": [132, 149, 156, 161], "data_xor": 132, "glanc": 132, "listedcolormap": 132, "constrained_layout": 132, "common_scatter_plot_param": 132, "middl": [132, 151], "set_ylabel": [132, 145, 163], "soft": [132, 141], "unsur": [132, 141], "attempt": [132, 133, 137], "leverag": 132, "spline": [132, 139], "onehot": 132, "kbinsdiscretizerkbinsdiscret": [132, 139], "segment": 132, "rectangular": 132, "drawn": 132, "n_knot": 132, "splinetransformersplinetransform": [132, 139], "favor": 132, "curvi": [132, 137], "knot": 132, "include_bia": [132, 133, 135, 136, 139], "polynomialfeaturespolynomialfeatur": [132, 133, 136, 139], "nystr\u00f6m": [132, 135], "coef0": [132, 141], "nystroemnystroem": [132, 139], "expans": [132, 139], "intract": 132, "radial": 132, "basi": 132, "furthemor": 132, "induct": 132, "rotation": 132, "everywher": [132, 137], "drawback": 132, "orign": 132, "despit": 132, "augment": [132, 133], "interplai": 132, "linear_regress": [133, 135, 138, 139, 164], "train_error": 133, "2e": 133, "85e": 133, "63e": 133, "69e": 133, "47e": 133, "fortun": 133, "feature_names_in_": 133, "model_first_fold": 133, "pipelineifittedpipelin": 133, "linearregressionlinearregress": [133, 139], "weights_linear_regress": 133, "symlog": 133, "homogen": 133, "choleski": 133, "_ridg": 133, "204": 133, "linalgwarn": 133, "rcond": 133, "59923e": 133, "linalg": 133, "xy": 133, "assume_a": 133, "po": [133, 142], "overwrite_a": 133, "59556e": 133, "59609e": 133, "11828e": 133, "06109e": 133, "60121e": 133, "61694e": 133, "59735e": 133, "59566e": 133, "72304e": 133, "60047e": 133, "59824e": 133, "59593e": 133, "59564e": 133, "5959e": 133, "59553e": 133, "59686e": 133, "60737e": 133, "5957e": 133, "60243e": 133, "90e": 133, "56e": 133, "55e": 133, "68e": 133, "weights_ridg": 133, "shrunk": 133, "worst": [133, 142], "saga": 133, "lsqr": 133, "re": [133, 177, 185], "resolv": 133, "omit": 133, "annual": 133, "neutral": [133, 163], "ahead": 133, "scaled_ridg": 133, "78e": 133, "21e": 133, "83e": 133, "17e": 133, "sweet": 133, "weights_ridge_scaled_data": 133, "ridge_large_alpha": 133, "1_000_000": 133, "unpredict": 133, "occurr": 133, "presenc": [133, 147], "divis": 133, "beforehand": 133, "store_cv_valu": 133, "12e": 133, "25e": 133, "50e": 133, "40e": 133, "mse_alpha": 133, "cv_values_": 133, "cv_alpha": 133, "000000e": 133, 
"841881e": 133, "347783e": 133, "321941e": 133, "837563e": 133, "343115e": 133, "747528e": 133, "831866e": 133, "336956e": 133, "310130e": 133, "824352e": 133, "328835e": 133, "053856e": 133, "814452e": 133, "318133e": 133, "274549e": 133, "319038e": 133, "337394e": 133, "328761e": 133, "324503e": 133, "338181e": 133, "722368e": 133, "328652e": 133, "338778e": 133, "564633e": 133, "331799e": 133, "339232e": 133, "334185e": 133, "339576e": 133, "yerr": 133, "yscale": 133, "salt": 133, "cook": 133, "best_alpha": 133, "11497569953977356": 133, "35111917342151344": 133, "1519911082952933": 133, "4641588833612782": 133, "08697490026177834": 133, "6135907273413176": 133, "stem": [133, 145], "summari": 133, "wasn": 133, "disproportion": 133, "15000": 134, "14000": 134, "predicted_body_mass": [134, 138, 140], "misleadingli": 134, "mse": [134, 139, 145, 147], "2764": 134, "854": 134, "338": 134, "573": 134, "041": 134, "337": 135, "071": 135, "868": 135, "poly_featur": 135, "linear_regression_interact": 135, "7077": 135, "3384": 135, "731": 135, "7347": 135, "3236": 135, "687": 135, "7858": 135, "3510": 135, "725": 135, "7083": 135, "3724": 135, "708": 135, "7467": 135, "3914": 135, "809": 135, "flipper_length_first_sampl": 135, "culmen_depth_first_sampl": 135, "301": 135, "790": 135, "340": 135, "spread": [135, 137, 153, 179], "enrich": 135, "nystroem_regress": [135, 139], "nystroem__n_compon": 135, "set_param": [135, 137, 151, 155, 181, 185], "299": 135, "874": 135, "4950": 135, "5050": 135, "footprint": 135, "scalabl": 135, "cv_results_lr": 136, "test_score_lr": 136, "79856704": 136, "79283521": 136, "79668305": 136, "80487305": 136, "80036855": 136, "79914005": 136, "79750205": 136, "7993448": 136, "80528256": 136, "80405405": 136, "causal": 136, "5_000": 136, "cv_results_complex_lr": 136, "test_score_complex_lr": 136, "85281474": 136, "85056295": 136, "84971335": 136, "8474611": 136, "84807535": 136, "84684685": 136, "85565111": 136, "8507371": 136, "85872236": 136, "8515561": 136, "workclass_infrequent_sklearn": 136, "education_infrequent_sklearn": 136, "status_": 136, "absent": 136, "widow": 136, "status_infrequent_sklearn": 136, "occupation_": 136, "adm": 136, "cleric": 136, "craft": 136, "repair": 136, "exec": 136, "manageri": 136, "handler": 136, "cleaner": 136, "tech": 136, "occupation_infrequent_sklearn": 136, "relationship_": 136, "race_": 136, "asian": 136, "pac": 136, "island": 136, "race_infrequent_sklearn": 136, "sex_": 136, "country_infrequent_sklearn": 136, "education_doctor": 136, "model_with_interact": 136, "cv_results_interact": 136, "test_score_interact": 136, "85383828": 136, "8527846": 136, "85298935": 136, "84930385": 136, "8503276": 136, "85462735": 136, "8523751": 136, "85176085": 136, "act": 136, "rapid": 136, "sigmoid": [137, 141], "nearli": 137, "steep": 137, "deduc": [137, 158], "lai": 137, "zone": 137, "weaker": 137, "light": 137, "lr_weight": 137, "perpendicular": [137, 157], "lowest": [137, 138, 145], "anywher": 137, "minor": 137, "blob": [137, 161], "frontier": 137, "conjunct": 137, "certainti": [137, 163], "linearregressionifittedlinearregress": 138, "68556640610011": 138, "5780": 138, "831358077066": 138, "mean_squared_error": [138, 139, 145], "inferred_body_mass": 138, "model_error": 138, "154546": 138, "313": 138, "occas": 139, "cubic": [139, 174], "said": [139, 145, 147], "data_max": 139, "data_min": 139, "len_data": 139, "sort": 139, "full_data": 139, "input_featur": 139, "reshap": [139, 145, 163], "fit_score_plot_regress": 139, "linearregressioninot": 139, 
"fittedlinearregress": 139, "global": 139, "data_expand": 139, "polynomial_expans": 139, "polynomial_regress": 139, "encourag": [139, 145], "svr": 139, "svrinot": 139, "fittedsvr": 139, "medium": 139, "10_000": [139, 157], "binned_regress": 139, "spline_regress": 139, "expand": 139, "3750": 140, "3800": 140, "3450": 140, "3650": 140, "2700": 140, "6300": 140, "heavier": [140, 158], "formula": 140, "shorter": 140, "13000": 140, "millimet": 140, "body_mass_180": 140, "body_mass_181": 140, "7200": 140, "7240": 140, "goe": [140, 142], "170mm": 140, "230mm": 140, "redefin": 140, "groupbi": 141, "inclin": 141, "x0": 141, "coef1": 141, "obliqu": [141, 157], "724791": 141, "096371": 141, "readi": 141, "barplot": 141, "horizont": [141, 161, 163], "hypothet": 141, "test_penguin": 141, "y_pred_proba": [141, 157], "1714923": 141, "8285077": 141, "y_proba_sampl": 141, "insist": 141, "overconfid": 141, "underconfid": 141, "asymptot": 141, "softmax": 141, "donor": 142, "ago": 142, "new_donor": 142, "That": [142, 147, 150, 152], "258": 142, "505": 142, "665": 142, "615": 142, "743": 142, "374": 142, "7780748663101604": 142, "accuracy_scor": 142, "778": 142, "finer": 142, "confusionmatrixdisplai": 142, "incorrect": 142, "erron": 142, "tp": 142, "tn": 142, "fn": 142, "fp": 142, "precision_scor": [142, 143, 146], "recall_scor": 142, "pos_label": [142, 143, 146], "688": 142, "124": 142, "mislabel": 142, "ratio": 142, "dummy_classifi": 142, "762": 142, "balanced_accuracy_scor": 142, "haven": 142, "target_proba_predict": 142, "271818": 142, "728182": 142, "451765": 142, "548235": 142, "445210": 142, "554790": 142, "441577": 142, "558423": 142, "870588": 142, "129412": 142, "equivalence_pred_proba": 142, "idxmax": 142, "graph": 142, "precisionrecalldisplai": 142, "tpr": 142, "ppv": 142, "ap": 142, "preval": 142, "ipykernel_5009": 142, "2781295333": 142, "__getitem__": 142, "ser": 142, "discrimin": 142, "roccurvedisplai": 142, "dash": 142, "plot_chance_level": 142, "pr": 142, "chance_level_kw": 142, "ambigu": [143, 146], "valueerror": [143, 146], "exc": [143, 146], "_valid": [143, 146], "1011": [143, 146], "recent": [143, 146], "_scorer": [143, 146], "137": [143, 145, 146], "__call__": [143, 146], "scorer": [143, 144, 146, 147], "_score": [143, 146], "345": [143, 146], "method_cal": [143, 146], "_cached_cal": [143, 146], "_get_response_valu": [143, 146], "_respons": [143, 146], "catch": [143, 146], "make_scor": [143, 146], "syntax": [144, 147], "iowa": 145, "intro": [145, 165], "996": 145, "902": 145, "2064": 145, "736": 145, "6872520581075443": 145, "dummy_regressor": 145, "608": 145, "disadvantag": 145, "median_absolute_error": 145, "mean_absolute_percentage_error": 145, "574": 145, "obsev": 145, "unobserv": 145, "extern": [145, 152], "cloud": 145, "against": 145, "exhibit": 145, "predictionerrordisplai": 145, "from_predict": 145, "y_true": 145, "actual_vs_predict": 145, "scatter_kwarg": 145, "residual_vs_predict": 145, "nwithout": 145, "banana": 145, "smile": 145, "clue": 145, "monoton": 145, "quantiletransform": [145, 185], "transformedtargetregressor": 145, "n_quantil": [145, 185], "900": 145, "output_distribut": 145, "model_transformed_target": 145, "ntransform": 145, "406": 145, "327": [145, 154], "disapprov": 145, "statistician": 145, "justifi": 145, "poissonregressor": 145, "tweedieregressor": 145, "reachabl": 145, "626": 146, "499": [146, 153, 154], "112": [146, 154], "166": 146, "00248313": 146, "00258851": 146, "00256777": 146, "002455": 146, "00250387": 146, "00245094": 146, "00240254": 146, 
"00262642": 146, "00248528": 146, "00246668": 146, "00242066": 146, "0023427": 146, "00231647": 146, "002321": 146, "00233245": 146, "00230694": 146, "00229311": 146, "00232673": 146, "0023334": 146, "00232935": 146, "test_accuraci": 146, "29333333": 146, "53333333": 146, "77333333": 146, "70666667": 146, "66216216": 146, "74324324": 146, "test_balanced_accuraci": 146, "42105263": 146, "48391813": 146, "66081871": 146, "3874269": 146, "43274854": 146, "44736842": 146, "55994152": 146, "73684211": 146, "49174407": 146, "50309598": 146, "794": 147, "892": 147, "225": 147, "test_r2": 147, "test_neg_mean_absolute_error": 147, "848721": 147, "256799": 147, "816374": 147, "084083": 147, "813513": 147, "113367": 147, "814138": 147, "448279": 147, "637473": 147, "370341": 147, "defaultdict": 147, "loss_funct": 147, "squared_error": 147, "absolute_error": 147, "loss_func": 147, "test_neg_mean_squared_error": 147, "243": 147, "923": 147, "344": [147, 154], "evolv": 147, "discontinu": 147, "surrog": 147, "substitut": 147, "log_loss": 147, "exhaust": [148, 155, 182], "cat_preprocessor": [148, 150, 152, 154, 155], "kneighborsregressor": [149, 156], "with_mean": [149, 156], "with_std": [149, 156], "dealt": 150, "ordinalencoderordinalencod": [150, 152, 154], "passthroughpassthrough": [150, 152, 154], "histgradientboostingclassifierhistgradientboostingclassifi": [150, 152, 154], "classifier__learning_r": [150, 152, 154, 155], "classifier__max_leaf_nod": [150, 152, 154, 155], "model_grid_search": [150, 152], "pipelinepipelin": [150, 152, 154], "charg": 150, "rapidli": 150, "ascend": [150, 154, 156], "mean_fit_tim": [150, 153], "std_fit_tim": [150, 153], "mean_score_tim": [150, 153], "std_score_tim": [150, 153], "param_classifier__learning_r": [150, 152, 153], "param_classifier__max_leaf_nod": [150, 152, 153], "split0_test_scor": [150, 153], "split1_test_scor": [150, 153], "rank_test_scor": [150, 152, 153, 154], "379503": 150, "037274": 150, "188996": 150, "010358": 150, "868912": 150, "867213": 150, "868063": 150, "000850": 150, "297463": 150, "004903": 150, "166850": 150, "000900": 150, "866783": 150, "866066": 150, "866425": 150, "000359": 150, "097578": 150, "001023": 150, "067673": 150, "004921": 150, "classifier__": 150, "854826": 150, "862899": 150, "858863": 150, "004036": 150, "122485": 150, "025753": 150, "074820": 150, "004542": 150, "853844": 150, "860934": 150, "857389": 150, "003545": 150, "208518": 150, "003173": 150, "111752": 150, "002506": 150, "852752": 150, "853781": 150, "853266": 150, "000515": 150, "shorten": 150, "param_classifier__": 150, "prefix": [150, 153], "column_result": [150, 154], "shorten_param": [150, 153, 154, 179], "__": [150, 151, 153, 154, 179], "rsplit": [150, 153, 154, 179], "851028": 150, "002707": 150, "843330": 150, "002917": 150, "817832": 150, "001124": 150, "797166": 150, "000715": 150, "618080": 150, "124277": 150, "549338": 150, "210599": 150, "283476": 150, "003775": 150, "heatmap": [150, 153], "pivoted_cv_result": 150, "pivot_t": 150, "ylgnbu": 150, "invert_yaxi": 150, "degrad": 150, "patholog": 150, "accordingli": 150, "hyperparamt": [150, 157], "recogniz": 151, "spell": 151, "classifier__c": [151, 179, 181], "hyperparameter_nam": 151, "preprocessor__copi": 151, "preprocessor__with_mean": 151, "preprocessor__with_std": 151, "classifier__class_weight": 151, "classifier__du": 151, "classifier__fit_intercept": 151, "classifier__intercept_sc": 151, "classifier__l1_ratio": 151, "classifier__max_it": 151, "classifier__multi_class": 151, "classifier__n_job": 151, 
"classifier__penalti": 151, "classifier__random_st": 151, "classifier__solv": 151, "classifier__tol": 151, "classifier__verbos": 151, "classifier__warm_start": 151, "001": [151, 154], "799": 151, "433321": 152, "068195": 152, "863241": 152, "429161": 152, "069802": 152, "860784": 152, "430613": 152, "069127": 152, "860360": [152, 153], "427582": 152, "070667": 152, "862408": [152, 153], "429843": 152, "068020": 152, "866912": 152, "863": 152, "embed": 152, "864195": 152, "000061": 152, "870910": 152, "869457": 152, "000819": 152, "866365": 152, "001822": 152, "877": 152, "schemat": 152, "green": [152, 157, 163], "rough": 152, "cv_test_scor": 152, "871": 152, "apprehend": 152, "cv_inner": 152, "cv_outer": 152, "cv_fold": 152, "estimator_in_fold": 152, "vote": 152, "randomized_search_result": [153, 154, 179], "param_classifier__l2_regular": 153, "param_classifier__max_bin": 153, "param_classifier__min_samples_leaf": 153, "split2_test_scor": 153, "split3_test_scor": 153, "split4_test_scor": 153, "540456": 153, "062725": 153, "052069": 153, "002661": 153, "467047": 153, "550075": 153, "classifier__l2_regular": [153, 154], "4670474863": 153, "856558": 153, "862271": 153, "857767": 153, "854491": 153, "856675": 153, "857552": 153, "002586": 153, "110536": 153, "033403": 153, "074142": 153, "002165": 153, "015449": 153, "001146": 153, "0154488709": 153, "758974": 153, "758941": 153, "758947": [153, 154], "000013": [153, 154], "323": [153, 157], "137484": 153, "053150": 153, "092993": 153, "029005": 153, "095093": 153, "004274": 153, "0950934559": 153, "783267": 153, "776413": 153, "779143": 153, "771341": 153, "010357": 153, "311": 153, "935108": 153, "202993": 153, "118105": 153, "023658": 153, "003621": 153, "001305": 153, "164": 153, "0036210968": 153, "255219": 153, "038301": 153, "056048": 153, "016736": 153, "000081": 153, "407382": 153, "1060737427": 153, "495": 153, "452411": 153, "023006": 153, "055563": 153, "000846": 153, "000075": 153, "364373": 153, "4813767874": 153, "858332": 153, "865001": 153, "862681": 153, "860770": 153, "861429": 153, "002258": 153, "133042": 153, "014456": 153, "078186": 153, "002199": 153, "065946": 153, "001222": 153, "0659455480": 153, "497": [153, 154], "911828": 153, "017167": 153, "076563": 153, "005130": 153, "460025": 153, "044408": 153, "4600250010": 153, "839907": 153, "849713": 153, "846847": 153, "846028": 153, "844390": 153, "845377": 153, "003234": 153, "140": 153, "498": 153, "168120": 153, "121819": 153, "061283": 153, "000760": 153, "000068": 153, "287904": 153, "227": 153, "146": 153, "7755366885": 153, "861881": 153, "859951": 153, "861862": 153, "862221": 153, "001623": 153, "823774": 153, "120686": 153, "060351": 153, "014958": 153, "445218": 153, "005112": 153, "4452178932": 153, "764569": 153, "765902": 153, "764947": 153, "765083": 153, "765281": 153, "000535": 153, "319": 153, "l2_regular": [153, 154, 179], "max_bin": [153, 154, 179], "score_bin": 153, "cut": [153, 161], "set_palett": 153, "ylgnbu_r": 153, "set_xscal": 153, "set_yscal": 153, "band": 153, "px": [153, 156, 179], "parallel_coordin": [153, 156, 179], "log10": [153, 179], "log2": [153, 179], "color_continuous_scal": [153, 156, 179], "yellow": [153, 163], "tick": 153, "invert": 153, "consecut": 154, "untract": 154, "situat": 154, "stochast": 154, "loguniform_int": 154, "__init__": 154, "_distribut": 154, "rv": 154, "arg": 154, "kwarg": 154, "processor": 154, "1e3": 154, "classifier__min_samples_leaf": 154, "classifier__max_bin": 154, "255": 154, "model_random_search": [154, 
156], "962": 154, "histgradientboostingc": 154, "_distn_infrastructur": 154, "rv_continuous_frozen": 154, "0x7f677f4b86a0": 154, "0x7f677f4aab20": 154, "__main__": 154, "0x7f677f4bc3a0": 154, "0x7f677f4bc130": 154, "0x7f677f4aaaf0": 154, "randomizedsearchcvifittedrandomizedsearchcv": 154, "pprint": 154, "011954994705001769": 154, "04397988125308962": 154, "129": 154, "011955": 154, "04398": 154, "869018": 154, "002866": 154, "851199": 154, "845263": 154, "144": 154, "108": 154, "852693": 154, "003954": 154, "001512": 154, "196641": 154, "844067": 154, "002766": 154, "000012": 154, "009697": 154, "840354": 154, "003787": 154, "605": 154, "906164": 154, "083779": 154, "829735": 154, "004207": 154, "012292": 154, "011544": 154, "224": 154, "805547": 154, "001448": 154, "000707": 154, "001796": 154, "000117": 154, "001665": 154, "000032": 154, "001894": 154, "135": 154, "850228": 154, "399645": 154, "233": 154, "756572": 154, "001422": 154, "to_csv": 154, "208": 154, "011775": 154, "076653": 154, "871393": 154, "001588": 154, "343": 154, "000404": 154, "244503": 154, "229": 154, "871339": 154, "002741": 154, "994918": 154, "077047": 154, "192": 154, "870793": 154, "001993": 154, "328": 154, "036232": 154, "224702": 154, "236": 154, "869837": 154, "000808": 154, "733808": 154, "036786": 154, "241": 154, "869673": 154, "002417": 154, "232": 154, "000097": 154, "976823": 154, "448205": 154, "253714": 154, "000001": 154, "828574": 154, "000003": 154, "091079": 154, "000444": 154, "236325": 154, "344629": 154, "207156": 154, "357": 154, "000026": 154, "075318": 154, "241053": 154, "valuabl": 154, "allevi": 154, "best_scor": 155, "best_param": 155, "lr": 155, "mln": 155, "mean_scor": 155, "010": 155, "789": 155, "813": 155, "842": 155, "847": 155, "852": 155, "828": 155, "288": 155, "480": 155, "639": 155, "best_lr": 155, "best_mln": 155, "870": 155, "kneighborsregressor__n_neighbor": 156, "standardscaler__with_mean": 156, "standardscaler__with_std": 156, "welcom": 156, "column_name_map": 156, "param_kneighborsregressor__n_neighbor": 156, "param_standardscaler__with_mean": 156, "param_standardscaler__with_std": 156, "boolean": 156, "column_scal": 156, "687926": 156, "674812": 156, "668778": 156, "648317": 156, "629772": 156, "215": 156, "617295": 156, "464": 156, "567164": 156, "508809": 156, "486503": 156, "103390": 156, "061394": 156, "033122": 156, "017583": 156, "007987": 156, "002900": 156, "238830": 156, "tealros": 156, "kneighbor": 156, "mpl": [157, 163], "tab10_norm": [157, 163], "dbd": 157, "tab10": [157, 163], "norm": [157, 163], "plot_tre": [157, 159, 161, 162, 163], "class_nam": [157, 163], "impur": [157, 163], "inferior": 157, "superior": 157, "settabl": 157, "45mm": 157, "test_penguin_1": 157, "test_penguin_2": 157, "y_proba_class_0": 157, "adelie_proba": 157, "chinstrap_proba": 157, "gentoo_proba": 157, "037": 157, "disregard": 157, "test_penguin_3": 157, "63975155": 157, "32298137": 157, "03726708": 157, "fairli": 157, "palmer": 158, "anatom": 158, "depict": 158, "set_size_inch": 158, "superimpos": [160, 164], "data_clf_column": 161, "target_clf_column": 161, "data_clf": 161, "data_reg_column": 161, "target_reg_column": 161, "data_reg": 161, "fit_and_plot_classif": 161, "fit_and_plot_regress": 161, "tree_clf": 161, "tree_reg": 161, "adequ": 161, "asymmetri": 161, "make_blob": 161, "interlac": 161, "x_1": 161, "y_1": 161, "x_2": 161, "y_2": 161, "min_impurity_decreas": 161, "asymmetr": 161, "priori": 162, "3698": 162, "5032": 162, "tricki": 163, "spectr": 163, "purpl": 163, "xx": 163, 
"yy": 163, "meshgrid": 163, "xfull": 163, "proba": 163, "sharei": 163, "class_of_interest": 163, "imshow_handl": 163, "extent": 163, "colorbar": 163, "cax": 163, "binar": 163, "impress": 163, "target_predicted_linear_regress": 164, "target_predicted_tre": 164, "interpol": 164, "offset": 164, "175": 164, "shortest": 164, "longest": 164, "m3": [165, 178, 180], "m5": [165, 167, 168, 169, 176], "acknowledg": 165, "prune": 171, "children": 172, "increment": 173, "refin": 173, "na_filt": 177, "author": 177, "circular": 179, "budget": [179, 183], "badli": 179, "get_paramet": 181, "anim": 185, "param_valu": 185, "powertransform": 185, "all_preprocessor": 185, "cox": 185, "classifier__n_neighbor": 185, "forgot": 185}, "objects": {}, "objtypes": {}, "objnames": {}, "titleterms": {"acknowledg": 0, "figur": 0, "attribut": [0, 2], "dataset": [1, 5, 73, 79, 80, 90, 103, 104, 105, 106, 107, 108, 152, 158], "descript": 1, "glossari": 2, "main": [2, 13, 22, 33, 39, 57, 71, 128, 134, 171, 183], "term": 2, "us": [2, 5, 10, 76, 85, 125, 126, 138, 150, 154], "thi": [2, 5], "cours": [2, 35], "api": 2, "classif": [2, 29, 141, 142, 157, 158, 167], "classifi": [2, 142], "cross": [2, 19, 20, 76, 85, 95, 96, 101], "valid": [2, 5, 19, 20, 58, 66, 76, 85, 95, 96, 101, 102], "data": [2, 5, 62, 64, 67, 73, 76, 79, 80, 81, 84, 85, 100, 128, 134], "matrix": [2, 142], "input": 2, "earli": 2, "stop": 2, "estim": [2, 101, 141, 163], "featur": [2, 31, 40, 81, 84, 87, 89, 108, 125, 126, 131, 132, 133, 137, 139, 165], "variabl": [2, 73, 84, 85, 87, 89, 108], "descriptor": 2, "covari": 2, "gener": [2, 102, 161], "perform": [2, 91, 165], "predict": [2, 5, 80, 84, 141, 142, 150, 152, 154, 163, 165], "statist": 2, "hyperparamet": [2, 11, 117, 119, 150, 151, 152, 153, 154, 161, 165, 168, 184], "infer": 2, "learn": [2, 5, 12, 21, 32, 35, 38, 49, 55, 58, 64, 69, 70, 80, 90, 95, 108, 110, 138, 140, 151, 165, 170, 182], "paramet": [2, 131, 133, 137, 161], "meta": 2, "model": [2, 5, 7, 8, 18, 37, 40, 44, 45, 47, 64, 66, 76, 80, 81, 85, 90, 91, 108, 118, 126, 128, 132, 133, 134, 141, 150, 152, 154, 165, 166, 169], "overfit": [2, 56, 61, 102], "predictor": 2, "regress": [2, 30, 132, 133, 138, 139, 140, 145, 158, 162, 176], "regressor": 2, "regular": [2, 44, 47, 131, 133, 137], "penal": 2, "sampl": [2, 94, 95], "instanc": 2, "observ": 2, "supervis": 2, "target": [2, 80], "label": [2, 5], "annot": 2, "test": [2, 53, 79, 80, 101], "set": [2, 151], "train": [2, 53, 79, 80, 101], "fit": [2, 64, 80, 81, 85], "transform": 2, "underfit": [2, 56, 61, 102], "unsupervis": 2, "other": [2, 161], "notebook": [3, 73, 76, 79, 80, 139], "time": [3, 5, 12, 21, 32, 38, 55, 70, 170, 182], "tabl": [4, 165], "content": [4, 165], "conclud": [5, 6, 165], "remark": [5, 6, 165], "last": 5, "lesson": [5, 108], "goal": 5, "The": [5, 73, 76, 103, 104, 105, 106, 107, 142, 158, 165], "big": 5, "messag": [5, 132], "mooc": [5, 35], "1": [5, 72, 108], "machin": [5, 49, 165], "pipelin": [5, 69, 84, 87, 89, 90, 110, 165], "2": [5, 59, 108], "adapt": [5, 109], "complex": [5, 110], "3": [5, 108, 185], "specif": [5, 85], "go": [5, 13, 22, 33, 39, 57, 71, 171, 183], "further": [5, 13, 22, 33, 39, 57, 71, 171, 183], "more": [5, 85, 101], "about": [5, 119], "scikit": [5, 35, 64, 69, 80, 90, 110, 138, 140, 151], "we": [5, 90], "ar": 5, "an": [5, 84], "open": 5, "sourc": 5, "commun": 5, "topic": 5, "have": 5, "cover": 5, "studi": 5, "bring": 5, "valu": 5, "bigger": 5, "pictur": 5, "beyond": 5, "evalu": [5, 76, 84, 85, 142, 152, 165], "matter": 5, "small": 5, "part": 5, "problem": 
[5, 163], "most": 5, "technic": 5, "craft": 5, "all": 5, "how": 5, "choic": [5, 19], "output": 5, "bias": 5, "versu": [5, 51, 54], "causal": 5, "societ": 5, "impact": [5, 131, 137], "intuit": [7, 8, 37, 45, 47, 166, 169], "ensembl": [7, 8, 9, 10, 11, 118, 165], "bag": [7, 110], "boost": [8, 9, 109, 115, 116, 117], "base": [9, 84, 85, 166, 169], "method": [10, 11], "bootstrap": [10, 110], "tune": [11, 117, 133, 150, 152, 154, 165, 178, 180], "modul": [12, 21, 32, 38, 55, 70, 170, 182], "overview": [12, 21, 32, 38, 55, 70, 170, 182], "what": [12, 21, 32, 38, 55, 70, 170, 182], "you": [12, 21, 32, 38, 55, 70, 170, 182], "befor": [12, 21, 32, 38, 55, 70, 170, 182], "get": [12, 21, 32, 38, 55, 70, 151, 170, 182], "start": [12, 21, 32, 38, 55, 70, 170, 182], "object": [12, 21, 32, 38, 55, 70, 170, 182], "schedul": [12, 21, 32, 38, 55, 70, 170, 182], "take": [13, 22, 33, 39, 57, 71, 108, 132, 171, 183], "awai": [13, 22, 33, 39, 57, 71, 108, 132, 171, 183], "wrap": [13, 17, 22, 28, 33, 39, 46, 57, 59, 71, 72, 171, 177, 183, 185], "up": [13, 17, 22, 28, 33, 39, 46, 57, 59, 71, 72, 116, 171, 177, 183, 185], "To": [13, 22, 33, 39, 57, 71, 171, 183], "quiz": [14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 172, 173, 174, 175, 177, 179, 181, 185], "m6": [14, 15, 16, 111, 112, 113, 114, 120, 121, 122, 123], "01": [14, 23, 41, 48, 60, 63, 74, 75, 92, 93, 97, 98, 111, 120, 124, 127, 128, 134, 148, 155, 159, 163, 172, 181], "question": [14, 15, 16, 17, 23, 24, 25, 26, 27, 28, 34, 36, 41, 42, 43, 46, 48, 50, 52, 59, 60, 63, 65, 68, 72, 172, 173, 174, 175, 177, 179, 181, 185], "02": [15, 24, 42, 52, 65, 77, 82, 112, 121, 129, 135, 143, 146, 149, 156, 160, 164, 173, 179], "03": [16, 25, 43, 50, 68, 78, 83, 113, 122, 130, 136, 144, 147, 174], "6": 17, "compar": [18, 53, 91], "simpl": [18, 91], "baselin": [18, 91, 142], "nest": [20, 96], "m7": [23, 24, 25, 26, 27, 93, 98, 143, 144, 146, 147], "04": [26, 86, 88, 114, 123, 131, 137, 175], "05": [27, 87, 89], "7": 28, "metric": [29, 30, 142], "caveat": 31, "select": [31, 84, 85, 125, 126, 165], "introduct": 35, "present": [35, 108], "welcom": 35, "follow": 35, "prerequisit": [35, 128, 134], "materi": 35, "social": 35, "network": 35, "linear": [37, 40, 44, 45, 47, 108, 131, 132, 133, 137, 138, 139, 140, 141, 165], "non": [40, 100, 131, 132, 137, 139], "engin": [40, 131, 132, 137, 139], "m4": [41, 42, 43, 128, 129, 130, 131, 134, 135, 136, 137], "4": 46, "intro": 48, "introduc": 49, "concept": [49, 165], "m2": [50, 52, 60, 92, 97], "bia": [51, 54], "varianc": [51, 54], "error": [53, 101], "trade": 54, "off": 54, "curv": [58, 95, 102], "tabular": 62, "explor": 62, "m1": [63, 65, 68, 74, 75, 77, 78, 82, 83, 86, 87, 88, 89], "numer": [64, 79, 81, 85, 87, 89], "handl": 67, "categor": [67, 84, 85, 87, 89], "visual": [69, 73, 90], "jupyt": [69, 90], "first": [73, 80, 90], "look": [73, 119], "our": [73, 84, 150, 152, 154], "load": [73, 79, 80, 90, 128, 134, 152], "adult": [73, 103], "censu": [73, 103], "column": [73, 85], "inspect": [73, 108], "creat": [73, 90, 161], "decis": [73, 115, 117, 131, 137, 157, 161, 162, 165, 167, 168, 176], "rule": 73, "hand": 73, "recap": [73, 76, 79, 80, 139], "exercis": [74, 75, 77, 78, 82, 83, 86, 87, 88, 89, 92, 93, 97, 98, 111, 112, 113, 114, 120, 121, 122, 123, 124, 127, 128, 129, 130, 131, 134, 135, 136, 137, 143, 144, 146, 147, 148, 149, 155, 156, 159, 160, 163, 164], "solut": [75, 82, 83, 88, 89, 97, 98, 120, 121, 122, 123, 127, 134, 135, 136, 137, 146, 147, 155, 156, 163, 164], 
"prepar": [76, 81], "need": 76, "work": 79, "entir": 79, "identifi": [79, 84], "split": [79, 80], "panda": 80, "separ": 80, "make": 80, "preprocess": 81, "encod": [84, 87, 89], "type": [84, 85], "strategi": 84, "categori": [84, 87, 89], "ordin": 84, "nomin": 84, "without": [84, 140, 152], "assum": 84, "ani": 84, "order": 84, "choos": 84, "togeth": 85, "dispatch": 85, "processor": 85, "power": 85, "refer": [87, 89], "scale": [87, 89, 108, 133], "integ": [87, 89], "code": [87, 89], "One": [87, 89], "hot": [87, 89], "analysi": [89, 153, 184], "Then": 90, "final": 90, "score": 90, "group": 94, "effect": [95, 133, 161], "size": 95, "summari": [95, 101, 102, 132], "stratif": 99, "i": 100, "d": 100, "framework": 101, "vs": [101, 102], "stabil": 101, "detail": [101, 119], "regard": 101, "cross_valid": 101, "am": 104, "hous": [104, 107], "bike": 105, "ride": 105, "blood": 106, "transfus": 106, "california": 107, "import": [108, 161], "0": 108, "sign": 108, "coeffici": 108, "A": [108, 119], "surpris": 108, "associ": 108, "check": 108, "spars": 108, "lasso": 108, "randomforest": 108, "feature_importances_": 108, "permut": 108, "discuss": 108, "adaboost": 109, "resampl": 110, "aggreg": 110, "gradient": [115, 116, 117], "tree": [115, 117, 157, 161, 162, 165, 166, 167, 168, 169, 176], "gbdt": 115, "speed": 116, "random": [117, 119, 154], "forest": [117, 119], "histogram": 117, "introductori": 118, "exampl": 118, "default": 119, "benefit": 125, "limit": 126, "definit": [128, 134], "influenc": [131, 137], "c": [131, 137], "boundari": [131, 137], "weight": [131, 137], "logist": 132, "addit": 132, "interact": 132, "multi": [132, 163], "step": 132, "probabl": [141, 142, 163], "accuraci": 142, "confus": 142, "deriv": 142, "issu": 142, "class": [142, 163], "imbal": 142, "differ": 142, "threshold": 142, "m3": [148, 149, 155, 156, 179, 181], "grid": 150, "search": [150, 153, 154, 184], "With": 152, "result": [153, 184], "build": 157, "penguin": 158, "m5": [159, 160, 163, 164, 172, 173, 174, 175], "helper": 161, "function": 161, "max_depth": 161, "best": 165, "appendix": 165, "interpret": 165, "5": 177, "autom": 178, "manual": 180}, "envversion": {"sphinx.domains.c": 2, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 6, "sphinx.domains.index": 1, "sphinx.domains.javascript": 2, "sphinx.domains.math": 2, "sphinx.domains.python": 3, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.intersphinx": 1, "sphinx": 56}}) \ No newline at end of file