Skip to content
3 changes: 3 additions & 0 deletions mlos_bench/mlos_bench/tests/config/schedulers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""
Unit tests for the mlos_bench Scheduler configs.
"""
54 changes: 54 additions & 0 deletions mlos_bench/mlos_bench/tests/config/schedulers/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""
Pytest fixtures for Scheduler config tests.

Provides fixtures for creating multiple TrialRunner instances using the mock environment config.
"""

from importlib.resources import files

import pytest

from mlos_bench.services.config_persistence import ConfigPersistenceService
from mlos_bench.schedulers.trial_runner import TrialRunner
from mlos_bench.util import path_join

# pylint: disable=redefined-outer-name

TRIAL_RUNNERS_COUNT = 4

@pytest.fixture
def mock_env_config_path() -> str:
    """
    Absolute path to the mock environment configuration file.

    Used to create TrialRunner instances for testing.
    """
    # Resolve the config file relative to the installed mlos_bench.config package.
    mock_env_file = files("mlos_bench.config").joinpath(
        "environments", "mock", "mock_env.jsonc"
    )
    return path_join(str(mock_env_file), abs_path=True)


@pytest.fixture
def trial_runners(
    config_loader_service: ConfigPersistenceService,
    mock_env_config_path: str,
) -> list[TrialRunner]:
    """
    Create a list of TrialRunner instances from the mock environment config.

    Returns
    -------
    list[TrialRunner]
        TRIAL_RUNNERS_COUNT TrialRunner instances built from the mock
        environment config.
    """
    runners = TrialRunner.create_from_json(
        config_loader=config_loader_service,
        env_json=mock_env_config_path,
        num_trial_runners=TRIAL_RUNNERS_COUNT,
    )
    return runners
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Tests for loading scheduler config examples."""
import logging

import pytest

from mlos_bench.config.schemas.config_schemas import ConfigSchema
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.schedulers.base_scheduler import Scheduler
from mlos_bench.schedulers.trial_runner import TrialRunner
from mlos_bench.services.config_persistence import ConfigPersistenceService
from mlos_bench.storage.sql.storage import SqlStorage
from mlos_bench.tests.config import locate_config_examples
from mlos_bench.util import get_class_from_name

# Import the shared fixture modules so their fixtures can be re-exported below.
import mlos_bench.tests.storage.sql.fixtures
import mlos_bench.tests.optimizers.fixtures

# Re-export the fixture functions under module-level names so pytest
# discovers them for the tests in this module.
mock_opt = mlos_bench.tests.optimizers.fixtures.mock_opt
sqlite_storage = mlos_bench.tests.storage.sql.fixtures.sqlite_storage


# Module logger; DEBUG level helps diagnose config loading failures.
_LOG = logging.getLogger(__name__)
_LOG.setLevel(logging.DEBUG)

# pylint: disable=redefined-outer-name

# Get the set of configs to test.
CONFIG_TYPE = "schedulers"


def filter_configs(configs_to_filter: list[str]) -> list[str]:
    """Filter out json files that aren't for the module we're testing."""
    # No scheduler-specific exclusions are needed yet, so pass everything through.
    return configs_to_filter


# Locate all of the scheduler config examples shipped with the package.
configs = locate_config_examples(
    ConfigPersistenceService.BUILTIN_CONFIG_PATH,
    CONFIG_TYPE,
    filter_configs,
)
# Sanity check: the parametrized test below requires at least one example config.
assert configs


@pytest.mark.parametrize("config_path", configs)
def test_load_scheduler_config_examples(
    config_loader_service: ConfigPersistenceService,
    config_path: str,
    mock_env_config_path: str,
    trial_runners: list[TrialRunner],
    sqlite_storage: SqlStorage,
    mock_opt: MockOptimizer,
) -> None:
    """Checks that each scheduler config example loads and instantiates."""
    # Load and schema-validate the example config.
    scheduler_config = config_loader_service.load_config(config_path, ConfigSchema.SCHEDULER)
    assert isinstance(scheduler_config, dict)
    # The "class" entry must name a Scheduler subclass.
    scheduler_cls = get_class_from_name(scheduler_config["class"])
    assert issubclass(scheduler_cls, Scheduler)
    # Minimal global configs normally provided by the Launcher.
    launcher_globals = {
        "experiment_id": f"test_experiment_{__name__}",
        "trial_id": 1,
    }
    # Build an instance of the declared class from the example config.
    scheduler = config_loader_service.build_scheduler(
        config=scheduler_config,
        global_config=launcher_globals,
        trial_runners=trial_runners,
        optimizer=mock_opt,
        storage=sqlite_storage,
        root_env_config=mock_env_config_path,
    )
    assert scheduler is not None
    assert isinstance(scheduler, scheduler_cls)
180 changes: 13 additions & 167 deletions mlos_bench/mlos_bench/tests/optimizers/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,170 +2,16 @@
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Test fixtures for mlos_bench optimizers."""


import pytest

from mlos_bench.optimizers.manual_optimizer import ManualOptimizer
from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.tests import SEED
from mlos_bench.tunables.tunable_groups import TunableGroups

# pylint: disable=redefined-outer-name


@pytest.fixture
def mock_configs() -> list[dict]:
    """Mock configurations of earlier experiments."""
    # (vmSize, idle policy, migration cost ns, latency ns) for each prior run.
    rows = [
        ("Standard_B4ms", "halt", 50000, 1000000),
        ("Standard_B4ms", "halt", 40000, 2000000),
        ("Standard_B4ms", "mwait", -1, 3000000),  # -1 is a special value
        ("Standard_B2s", "mwait", 200000, 4000000),
    ]
    return [
        {
            "vmSize": vm_size,
            "idle": idle,
            "kernel_sched_migration_cost_ns": migration_cost,
            "kernel_sched_latency_ns": latency,
        }
        for (vm_size, idle, migration_cost, latency) in rows
    ]


@pytest.fixture
def mock_opt_no_defaults(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for MockOptimizer that ignores the initial configuration."""
    optimizer_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        # Skip the default tunable values when suggesting configs.
        "start_with_defaults": False,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a minimizing MockOptimizer."""
    optimizer_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def mock_opt_max(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a maximizing MockOptimizer."""
    optimizer_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 10,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def flaml_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for the mlos_core FLAML optimizer (minimizing)."""
    optimizer_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for the mlos_core FLAML optimizer (maximizing)."""
    optimizer_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


# FIXME: SMAC's RF model can be non-deterministic at low iterations, which are
# normally calculated as a percentage of the max_suggestions and number of
# tunable dimensions, so for now we set the initial random samples equal to the
# number of iterations and control them with a seed.

SMAC_ITERATIONS = 10


@pytest.fixture
def smac_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for the mlos_core SMAC optimizer (minimizing)."""
    optimizer_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # Pin the random-init count to the iteration budget for determinism
        # (see the module-level FIXME above).
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for the mlos_core SMAC optimizer (maximizing)."""
    optimizer_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # Pin the random-init count to the iteration budget for determinism
        # (see the module-level FIXME above).
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)


@pytest.fixture
def manual_opt(tunable_groups: TunableGroups, mock_configs: list[dict]) -> ManualOptimizer:
    """Test fixture for a ManualOptimizer replaying the mock_configs."""
    optimizer_config = {
        # Cycle through the mock configs twice.
        "max_cycles": 2,
        "tunable_values_cycle": mock_configs,
    }
    return ManualOptimizer(tunables=tunable_groups, service=None, config=optimizer_config)
"""Export test fixtures for mlos_bench optimizers."""

import mlos_bench.tests.optimizers.fixtures

mock_configs = mlos_bench.tests.optimizers.fixtures.mock_configs
mock_opt_no_defaults = mlos_bench.tests.optimizers.fixtures.mock_opt_no_defaults
mock_opt = mlos_bench.tests.optimizers.fixtures.mock_opt
mock_opt_max = mlos_bench.tests.optimizers.fixtures.mock_opt_max
flaml_opt = mlos_bench.tests.optimizers.fixtures.flaml_opt
flaml_opt_max = mlos_bench.tests.optimizers.fixtures.flaml_opt_max
smac_opt = mlos_bench.tests.optimizers.fixtures.smac_opt
smac_opt_max = mlos_bench.tests.optimizers.fixtures.smac_opt_max
manual_opt = mlos_bench.tests.optimizers.fixtures.manual_opt
Loading
Loading