Coverage for mlos_bench/mlos_bench/tests/optimizers/conftest.py: 100% (34 statements)
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Test fixtures for mlos_bench optimizers."""

import pytest

from mlos_bench.optimizers.manual_optimizer import ManualOptimizer
from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.tests import SEED
from mlos_bench.tunables.tunable_groups import TunableGroups

# pylint: disable=redefined-outer-name


@pytest.fixture
def mock_configs() -> list[dict]:
    """Mock configurations of earlier experiments."""
    return [
        {
            "vmSize": "Standard_B4ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": 50000,
            "kernel_sched_latency_ns": 1000000,
        },
        {
            "vmSize": "Standard_B4ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": 40000,
            "kernel_sched_latency_ns": 2000000,
        },
        {
            "vmSize": "Standard_B4ms",
            "idle": "mwait",
            "kernel_sched_migration_cost_ns": -1,  # Special value
            "kernel_sched_latency_ns": 3000000,
        },
        {
            "vmSize": "Standard_B2s",
            "idle": "mwait",
            "kernel_sched_migration_cost_ns": 200000,
            "kernel_sched_latency_ns": 4000000,
        },
    ]
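

# An illustrative usage sketch, not a fixture: tests can replay these mock
# configs onto the tunables. The helper below is hypothetical; it assumes
# TunableGroups.assign() accepts a dict of tunable values and that indexing a
# TunableGroups by name returns that tunable's current value.
def _example_assign_mock_configs(
    tunable_groups: TunableGroups,
    mock_configs: list[dict],
) -> None:
    """Illustrative only: each mock config should be assignable to the tunables."""
    for config in mock_configs:
        tunable_groups.assign(config)
        assert tunable_groups["idle"] == config["idle"]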


@pytest.fixture
def mock_opt_no_defaults(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for MockOptimizer that ignores the initial configuration."""
    return MockOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "optimization_targets": {"score": "min"},
            "max_suggestions": 5,
            "start_with_defaults": False,
            "seed": SEED,
        },
    )


@pytest.fixture
def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a minimizing MockOptimizer."""
    return MockOptimizer(
        tunables=tunable_groups,
        service=None,
        config={"optimization_targets": {"score": "min"}, "max_suggestions": 5, "seed": SEED},
    )


@pytest.fixture
def mock_opt_max(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a maximizing MockOptimizer."""
    return MockOptimizer(
        tunables=tunable_groups,
        service=None,
        config={"optimization_targets": {"score": "max"}, "max_suggestions": 10, "seed": SEED},
    )
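

# An illustrative usage sketch, not a fixture: how tests typically drive the
# mock optimizers above. The helper is hypothetical; it assumes the base
# Optimizer API exposes suggest(), register(), and not_converged(), and that
# scores are passed as a dict keyed by the optimization target ("score" here);
# check base_optimizer.py for the exact signatures.
def _example_optimization_loop(mock_opt: MockOptimizer) -> None:
    """Illustrative only: suggest/register until the suggestion budget runs out."""
    from mlos_bench.environments.status import Status  # Local import keeps the sketch self-contained.

    while mock_opt.not_converged():
        tunables = mock_opt.suggest()
        benchmark_result = 123.0  # Stand-in for a real benchmark measurement.
        mock_opt.register(tunables, Status.SUCCEEDED, {"score": benchmark_result})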


@pytest.fixture
def flaml_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core FLAML optimizer."""
    return MlosCoreOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "optimization_targets": {"score": "min"},
            "max_suggestions": 15,
            "optimizer_type": "FLAML",
            "seed": SEED,
        },
    )


@pytest.fixture
def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core FLAML optimizer."""
    return MlosCoreOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "optimization_targets": {"score": "max"},
            "max_suggestions": 15,
            "optimizer_type": "FLAML",
            "seed": SEED,
        },
    )


# FIXME: SMAC's random forest model can be non-deterministic when the number of
# initial random samples is low. That number is normally derived as a fraction
# of max_suggestions and the number of tunable dimensions, so for now we set
# the initial random samples equal to the total number of iterations and
# control them with a fixed seed.

SMAC_ITERATIONS = 10


@pytest.fixture
def smac_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core SMAC optimizer."""
    return MlosCoreOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "optimization_targets": {"score": "min"},
            "max_suggestions": SMAC_ITERATIONS,
            "optimizer_type": "SMAC",
            "seed": SEED,
            "output_directory": None,
            # See the FIXME note above.
            "n_random_init": SMAC_ITERATIONS,
            "max_ratio": 1.0,
        },
    )


@pytest.fixture
def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core SMAC optimizer."""
    return MlosCoreOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "optimization_targets": {"score": "max"},
            "max_suggestions": SMAC_ITERATIONS,
            "optimizer_type": "SMAC",
            "seed": SEED,
            "output_directory": None,
            # See the FIXME note above.
            "n_random_init": SMAC_ITERATIONS,
            "max_ratio": 1.0,
        },
    )


@pytest.fixture
def manual_opt(tunable_groups: TunableGroups, mock_configs: list[dict]) -> ManualOptimizer:
    """Test fixture for a ManualOptimizer that cycles through mock_configs."""
    return ManualOptimizer(
        tunables=tunable_groups,
        service=None,
        config={
            "max_cycles": 2,
            "tunable_values_cycle": mock_configs,
        },
    )
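

# An illustrative usage sketch, not a fixture: ManualOptimizer is expected to
# replay tunable_values_cycle for max_cycles rounds, i.e. 2 * len(mock_configs)
# == 8 suggestions here. The helper and the stopping behavior it assumes
# (not_converged() turning False once the cycles are exhausted) are
# hypothetical; see manual_optimizer.py for the actual semantics.
def _example_manual_replay(manual_opt: ManualOptimizer, mock_configs: list[dict]) -> None:
    """Illustrative only: count how many suggestions the manual cycle yields."""
    count = 0
    while manual_opt.not_converged():
        manual_opt.suggest()
        count += 1
    assert count == 2 * len(mock_configs)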