Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions docs/source/_snippets/user_guide/integrations.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,41 @@ def configure_optimizers(self):
best_params = optimizer.solve()
# [end:pytorch_lightning]

# [start:lightgbm_experiment]
from lightgbm import LGBMClassifier
from sklearn.datasets import load_iris

from hyperactive.experiment.integrations import LightGBMExperiment
from hyperactive.opt.gfo import BayesianOptimizer

# Load data
X, y = load_iris(return_X_y=True)

# Create the experiment (3-fold cross-validation of the classifier)
experiment = LightGBMExperiment(
    estimator=LGBMClassifier(),
    X=X,
    y=y,
    cv=3,
)

# Define search space
search_space = {
    "n_estimators": [50, 100, 200],
    "max_depth": [3, 5, 7, -1],  # -1 = no depth limit in LightGBM
    "learning_rate": [0.01, 0.05, 0.1, 0.2],
}

# Optimize
optimizer = BayesianOptimizer(
    search_space=search_space,
    n_iter=10,
    experiment=experiment,
)
best_params = optimizer.solve()
print(f"Best parameters: {best_params}")
# [end:lightgbm_experiment]


# --- Runnable test code below ---
if __name__ == "__main__":
Expand Down
13 changes: 12 additions & 1 deletion docs/source/api_reference/experiments_integrations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ The :mod:`hyperactive.experiment.integrations` module contains experiment classe
for integration with machine learning frameworks.

These experiments provide seamless hyperparameter optimization for scikit-learn,
sktime, skpro, and PyTorch Lightning models.
sktime, skpro, PyTorch Lightning, and LightGBM models.

Scikit-Learn
------------
Expand Down Expand Up @@ -55,3 +55,14 @@ Experiments for PyTorch Lightning models.
:template: class.rst

TorchExperiment

LightGBM
--------

Cross-validation experiments for LightGBM estimators.

.. autosummary::
:toctree: auto_generated/
:template: class.rst

LightGBMExperiment
43 changes: 42 additions & 1 deletion docs/source/user_guide/integrations.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ Framework Integrations
Hyperactive integrates with popular ML frameworks, providing drop-in replacements
for tools like ``GridSearchCV``. Each ML framework has its own conventions for training and evaluation. The integration
classes handle cross-validation setup, scoring metrics, and parameter translation, so
you can use any optimizer with scikit-learn, sktime, skpro, or PyTorch models.
you can use any optimizer with scikit-learn, sktime, skpro, PyTorch, or LightGBM models.

----

Expand Down Expand Up @@ -53,6 +53,15 @@ Supported Frameworks

Deep learning models

.. grid-item-card:: LightGBM
:class-card: sd-border-info
:link: #lightgbm-integration
:link-type: url

**LightGBMExperiment**

Gradient boosting models

----

Quick Reference
Expand Down Expand Up @@ -86,6 +95,10 @@ Quick Reference
- ``TorchExperiment``
- Deep learning models
- ``[all_extras]``
* - LightGBM
- ``LightGBMExperiment``
- Classification, regression
- ``[lightgbm]``

----

Expand Down Expand Up @@ -237,6 +250,34 @@ For deep learning hyperparameter optimization with PyTorch Lightning:

----

LightGBM Integration
--------------------

For gradient boosting hyperparameter optimization with LightGBM:

.. note::

    Requires ``pip install lightgbm`` (also available via the ``hyperactive[lightgbm]`` extra listed in the Quick Reference)

.. grid:: 1
:gutter: 0

.. grid-item::
:class: sd-bg-light sd-pt-3 sd-pb-1 sd-ps-3 sd-pe-3 sd-rounded-3

**Key Features**

- Optimize LightGBM classifiers and regressors
- LightGBM follows the sklearn API, so cross-validation works out of the box
- Supports all LightGBM hyperparameters (``n_estimators``, ``max_depth``, ``learning_rate``, etc.)

.. literalinclude:: ../_snippets/user_guide/integrations.py
:language: python
:start-after: # [start:lightgbm_experiment]
:end-before: # [end:lightgbm_experiment]

----

Tips
----

Expand Down
50 changes: 50 additions & 0 deletions src/hyperactive/base/tests/test_lightgbm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
"""Integration test for end-to-end usage of optimizer with LightGBM experiment."""
# copyright: hyperactive developers, MIT License (see LICENSE file)


def test_endtoend_lightgbm():
    """Test end-to-end usage of HillClimbing optimizer with LightGBM experiment."""
    from skbase.utils.dependencies import _check_soft_dependencies

    # Bail out silently when the optional lightgbm dependency is absent.
    if not _check_soft_dependencies("lightgbm", severity="none"):
        return None

    import numpy as np
    from lightgbm import LGBMClassifier
    from sklearn.datasets import load_iris

    from hyperactive.experiment.integrations import LightGBMExperiment
    from hyperactive.opt import HillClimbing

    # Experiment: 2-fold cross-validation of a small LightGBM classifier on iris.
    features, target = load_iris(return_X_y=True)
    experiment = LightGBMExperiment(
        estimator=LGBMClassifier(n_estimators=10, verbosity=-1),
        X=features,
        y=target,
        cv=2,
    )

    # Optimizer: hill climbing over a tiny discrete grid, 10 iterations.
    optimizer = HillClimbing(
        search_space={
            "n_estimators": np.array([5, 10, 20]),
            "max_depth": np.array([2, 3, 5]),
        },
        n_iter=10,
        experiment=experiment,
    )
    optimizer.solve()

    result = optimizer.best_params_
    assert result is not None, "Best parameters should not be None"
    assert isinstance(result, dict), "Best parameters should be a dictionary"
    assert (
        "n_estimators" in result
    ), "Best parameters should contain 'n_estimators'"
    assert "max_depth" in result, "Best parameters should contain 'max_depth'"
2 changes: 2 additions & 0 deletions src/hyperactive/experiment/integrations/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Integrations with packages for tuning."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.experiment.integrations.lightgbm_experiment import LightGBMExperiment
from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment
from hyperactive.experiment.integrations.skpro_probareg import (
SkproProbaRegExperiment,
Expand All @@ -21,4 +22,5 @@
"SktimeClassificationExperiment",
"SktimeForecastingExperiment",
"TorchExperiment",
"LightGBMExperiment",
]
64 changes: 64 additions & 0 deletions src/hyperactive/experiment/integrations/lightgbm_experiment.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
"""Experiment adapter for LightGBM cross-validation experiments."""

# copyright: hyperactive developers, MIT License (see LICENSE file)

from hyperactive.experiment.integrations.sklearn_cv import SklearnCvExperiment


class LightGBMExperiment(SklearnCvExperiment):
    """Experiment adapter for LightGBM cross-validation experiments.

    Thin wrapper around SklearnCvExperiment for LightGBM estimators.

    LightGBM estimators follow the sklearn API, so this class does not
    add new functionality beyond SklearnCvExperiment. It exists for
    discoverability and explicit LightGBM support.
    """

    # Soft-dependency tag: framework skips tests when lightgbm is absent.
    _tags = {
        "python_dependencies": "lightgbm",
    }

    @classmethod
    def get_test_params(cls, parameter_set="default"):
        """Return testing parameter settings for the estimator.

        Parameters
        ----------
        parameter_set : str, default="default"
            Name of the parameter set to return; only the default set exists.

        Returns
        -------
        list of dict
            Two parameter sets — classification on iris and regression on
            diabetes — or an empty list if lightgbm is not installed.
        """
        from skbase.utils.dependencies import _check_soft_dependencies

        # No fixtures without the optional dependency; must stay in sync
        # with _get_score_params, which returns one entry per fixture here.
        if not _check_soft_dependencies("lightgbm", severity="none"):
            return []

        from lightgbm import LGBMClassifier, LGBMRegressor
        from sklearn.datasets import load_diabetes, load_iris

        # Classification test case; verbosity=-1 silences LightGBM's
        # per-fit logging (consistent with the end-to-end test fixture).
        X, y = load_iris(return_X_y=True)
        params0 = {
            "estimator": LGBMClassifier(n_estimators=10, verbosity=-1),
            "X": X,
            "y": y,
            "cv": 2,
        }

        # Regression test case
        X, y = load_diabetes(return_X_y=True)
        params1 = {
            "estimator": LGBMRegressor(n_estimators=10, verbosity=-1),
            "X": X,
            "y": y,
            "cv": 2,
        }

        return [params0, params1]

    @classmethod
    def _get_score_params(cls):
        """Return parameter settings for score/evaluate tests.

        Returns
        -------
        list of dict
            One hyperparameter dict per fixture from get_test_params;
            empty list if lightgbm is not installed.
        """
        from skbase.utils.dependencies import _check_soft_dependencies

        # Mirror get_test_params: no fixtures -> no score params.
        if not _check_soft_dependencies("lightgbm", severity="none"):
            return []

        # The same small search point works for both the classifier and
        # the regressor fixture.
        val0 = {"n_estimators": 5, "max_depth": 2}
        val1 = {"n_estimators": 5, "max_depth": 2}

        return [val0, val1]