Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion src/hyperactive/experiment/integrations/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
"""Integrations with packages for tuning."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
from hyperactive.experiment.integrations.sklearn_cv import (
SklearnCvExperiment,
XGBoostCvExperiment,
)
from hyperactive.experiment.integrations.skpro_probareg import (
SkproProbaRegExperiment,
)
Expand All @@ -21,4 +24,5 @@
"SktimeClassificationExperiment",
"SktimeForecastingExperiment",
"TorchExperiment",
"XGBoostCvExperiment",
]
84 changes: 84 additions & 0 deletions src/hyperactive/experiment/integrations/sklearn_cv.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@
from hyperactive.experiment.integrations._skl_cv import _coerce_cv
from hyperactive.experiment.integrations._skl_metrics import _coerce_to_scorer_and_sign

__all__ = ["SklearnCvExperiment", "XGBoostCvExperiment"]


class SklearnCvExperiment(BaseExperiment):
"""Experiment adapter for sklearn cross-validation experiments.
Expand Down Expand Up @@ -254,3 +256,85 @@ def _get_score_params(self):
score_params_defaults,
]
return params


class XGBoostCvExperiment(SklearnCvExperiment):
    """Experiment adapter for XGBoost cross-validation.

    Thin wrapper around SklearnCvExperiment for XGBoost estimators.
    Because XGBoost classifiers and regressors follow the sklearn
    estimator API, no extra logic is needed — this class exists purely
    for discoverability and to declare the soft dependency.

    Parameters
    ----------
    estimator : xgboost estimator
        XGBClassifier, XGBRegressor, or XGBRanker instance.
    X : array-like, shape (n_samples, n_features)
        Input data for the model.
    y : array-like, shape (n_samples,) or (n_samples, n_outputs)
        Target values.
    cv : int or cross-validation generator, default = KFold(n_splits=3, shuffle=True)
        Number of folds or cross-validation strategy.
        If int, uses KFold(n_splits=cv, shuffle=True).
    scoring : callable or str, default = accuracy_score or mean_squared_error
        Scoring function or metric. Default depends on estimator type.

    Example
    -------
    >>> from hyperactive.experiment.integrations import XGBoostCvExperiment
    >>> from sklearn.datasets import load_iris
    >>> from xgboost import XGBClassifier  # doctest: +SKIP
    >>>
    >>> X, y = load_iris(return_X_y=True)
    >>> xgb_exp = XGBoostCvExperiment(  # doctest: +SKIP
    ...     estimator=XGBClassifier(verbosity=0),
    ...     X=X,
    ...     y=y,
    ... )
    >>> params = {"n_estimators": 100, "max_depth": 3}
    >>> score, metadata = xgb_exp.score(params)  # doctest: +SKIP
    """

    # declare the optional runtime dependency for the skbase tag system
    _tags = {
        "python_dependencies": "xgboost",
    }

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the estimator."""
        from skbase.utils.dependencies import _check_soft_dependencies

        # no test instances can be built when xgboost is not installed
        if not _check_soft_dependencies("xgboost", severity="none"):
            return []

        from sklearn.datasets import load_diabetes, load_iris
        from xgboost import XGBClassifier, XGBRegressor

        # one classification and one regression setting, small for speed
        clf_X, clf_y = load_iris(return_X_y=True)
        reg_X, reg_y = load_diabetes(return_X_y=True)

        clf_setting = {
            "estimator": XGBClassifier(n_estimators=10, verbosity=0),
            "X": clf_X,
            "y": clf_y,
            "cv": 2,
        }
        reg_setting = {
            "estimator": XGBRegressor(n_estimators=10, verbosity=0),
            "X": reg_X,
            "y": reg_y,
            "cv": 2,
        }

        return [clf_setting, reg_setting]

    @classmethod
    def _get_score_params(cls):
        """Return parameter dicts to pass to score, aligned with get_test_params."""
        from skbase.utils.dependencies import _check_soft_dependencies

        # mirror get_test_params: empty when xgboost is unavailable
        if not _check_soft_dependencies("xgboost", severity="none"):
            return []

        # same small search point for both test settings; copies so the
        # two entries are independent dict objects
        point = {"n_estimators": 5, "max_depth": 2}
        return [dict(point), dict(point)]