Coverage for mlos_bench/mlos_bench/optimizers/one_shot_optimizer.py: 100% (17 statements)
coverage.py v7.6.10, created at 2025-01-21 01:50 +0000
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""No-op optimizer for mlos_bench that proposes a single configuration."""

import logging

from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.services.base_service import Service
from mlos_bench.tunables.tunable_groups import TunableGroups

_LOG = logging.getLogger(__name__)


class OneShotOptimizer(MockOptimizer):
    """
    No-op optimizer that proposes a single configuration and returns.

    Explicit configs (partial or full) are possible using configuration files.
    """

    def __init__(
        self,
        tunables: TunableGroups,
        config: dict,
        global_config: dict | None = None,
        service: Service | None = None,
    ):
        super().__init__(tunables, config, global_config, service)
        _LOG.info("Run a single iteration for: %s", self._tunables)
        self._max_suggestions = 1  # Always run for just one iteration.

    def suggest(self) -> TunableGroups:
        """Always produce the same (initial) suggestion."""
        tunables = super().suggest()
        self._start_with_defaults = True
        return tunables

    @property
    def supports_preload(self) -> bool:
        return False
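For orientation, below is a minimal usage sketch of the class above. The TunableGroups layout (a covariant group with "cost" and "params") follows the usual mlos_bench schema, but the group and parameter names and the empty optimizer config dict are illustrative assumptions, not values taken from this repository's configs.

from mlos_bench.optimizers.one_shot_optimizer import OneShotOptimizer
from mlos_bench.tunables.tunable_groups import TunableGroups

# Hypothetical tunables for illustration only.
tunables = TunableGroups(
    {
        "example_group": {
            "cost": 1,
            "params": {
                "example_param": {
                    "type": "int",
                    "range": [1, 100],
                    "default": 50,
                },
            },
        },
    }
)

# OneShotOptimizer forces _max_suggestions to 1, so the surrounding
# suggest/register loop runs for exactly one iteration.
optimizer = OneShotOptimizer(tunables, config={}, global_config=None, service=None)

suggestion = optimizer.suggest()  # always the same (initial) suggestion
print(suggestion)

Since supports_preload is False, callers are presumably expected to skip pre-loading data from previous experiments into this optimizer.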