Coverage for mlos_bench/mlos_bench/tests/launcher_run_test.py: 97% (34 statements)

#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Unit tests to check the main CLI launcher."""
import os
import re

import pytest

from mlos_bench.services.config_persistence import ConfigPersistenceService
from mlos_bench.services.local.local_exec import LocalExecService
from mlos_bench.util import path_join

# pylint: disable=redefined-outer-name


@pytest.fixture
def root_path() -> str:
    """Root path of the mlos_bench project."""
    return path_join(os.path.dirname(__file__), "../../..", abs_path=True)
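

# A LocalExecService whose ConfigPersistenceService parent searches the in-repo
# "mlos_bench/config" and "mlos_bench/examples" trees for configuration files.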
@pytest.fixture
def local_exec_service() -> LocalExecService:
    """Test fixture for LocalExecService."""
    return LocalExecService(
        parent=ConfigPersistenceService(
            {
                "config_path": [
                    "mlos_bench/config",
                    "mlos_bench/examples",
                ]
            }
        )
    )


def _launch_main_app(
    root_path: str,
    local_exec_service: LocalExecService,
    cli_config: str,
    re_expected: list[str],
) -> None:
    """Run the mlos_bench command-line application with the given config and check
    the results in the log.
    """
    with local_exec_service.temp_dir_context() as temp_dir:
        # Test developers note: for local debugging,
        # uncomment the following line to use a known file path that can be examined:
        # temp_dir = '/tmp'
        log_path = path_join(temp_dir, "mock-test.log")
        (return_code, _stdout, _stderr) = local_exec_service.local_exec(
            [
                "./mlos_bench/mlos_bench/run.py"
                " --config_path ./mlos_bench/mlos_bench/tests/config/"
                f" {cli_config} --log_file '{log_path}'"
            ],
            cwd=root_path,
        )
        assert return_code == 0
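
        # Scan the log file once, top to bottom: each time the current pattern
        # matches a line, advance to the next expected pattern, so the patterns
        # must appear in the log in the given order. Exhausting the iterator
        # (StopIteration) means every expected pattern was found.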
        try:
            iter_expected = iter(re_expected)
            re_log = re.compile(next(iter_expected))
            with open(log_path, encoding="utf-8") as fh_out:
                for line in fh_out:
                    if re_log.match(line):
                        re_log = re.compile(next(iter_expected))
            assert False, f"Pattern not found: '{re_log.pattern}'"
        except StopIteration:
            pass  # Success: all patterns found
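

# Matches the log timestamp prefix, e.g. "2025-01-21 01:50:00,123"
# (Python logging's default asctime format).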
_RE_DATE = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}"


def test_launch_main_app_bench(root_path: str, local_exec_service: LocalExecService) -> None:
    """Run the mlos_bench command-line application with the mock benchmark config and
    default tunable values, and check the results in the log.
    """
    _launch_main_app(
        root_path,
        local_exec_service,
        (
            " --config cli/mock-bench.jsonc"
            " --trial_config_repeat_count 5"
            " --mock_env_seed -1"  # Deterministic Mock Environment.
        ),
        [
            f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 67\.40\d+\}\s*$",
        ],
    )


def test_launch_main_app_bench_values(
    root_path: str,
    local_exec_service: LocalExecService,
) -> None:
    """Run the mlos_bench command-line application with the mock benchmark config and
    user-specified tunable values, and check the results in the log.
    """
    _launch_main_app(
        root_path,
        local_exec_service,
        (
            " --config cli/mock-bench.jsonc"
            " --tunable_values tunable-values/tunable-values-example.jsonc"
            " --trial_config_repeat_count 5"
            " --mock_env_seed -1"  # Deterministic Mock Environment.
        ),
        [
            f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 67\.11\d+\}\s*$",
        ],
    )


def test_launch_main_app_opt(root_path: str, local_exec_service: LocalExecService) -> None:
    """Run the mlos_bench command-line application with the mock optimization config
    and check the results in the log.
    """
    _launch_main_app(
        root_path,
        local_exec_service,
        (
            "--config cli/mock-opt.jsonc"
            " --trial_config_repeat_count 3"
            " --max_suggestions 3"
            " --mock_env_seed 42"  # Noisy Mock Environment.
        ),
        [
            # Iteration 1: Expect the first value to be the baseline.
            f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ "
            + r"bulk_register DEBUG Warm-up END: .* :: \{'score': 64\.53\d+\}$",
            # Iteration 2: The result may not always be deterministic.
            f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ "
            + r"bulk_register DEBUG Warm-up END: .* :: \{'score': \d+\.\d+\}$",
            # Iteration 3: Non-deterministic (depends on the optimizer).
            f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ "
            + r"bulk_register DEBUG Warm-up END: .* :: \{'score': \d+\.\d+\}$",
            # Final result: the baseline is the optimum for the mock environment.
            f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 64\.53\d+\}\s*$",
        ],
    )
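

# A minimal sketch of how to run these tests locally (assuming a checkout of the
# MLOS repository with mlos_bench and pytest installed in the active environment):
#
#   pytest mlos_bench/mlos_bench/tests/launcher_run_test.py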