Coverage for mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py: 100%

36 statements  

coverage.py v7.6.1, created at 2024-10-07 01:52 +0000

#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Unit tests for mock mlos_bench optimizer."""

import pytest

from mlos_bench.environments.status import Status
from mlos_bench.optimizers.mock_optimizer import MockOptimizer

# pylint: disable=redefined-outer-name
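
# NOTE: The mock_opt, mock_opt_no_defaults, and mock_opt_max fixtures used by the
# tests below are not defined in this module; pytest resolves them from the test
# package's shared fixtures (presumably a conftest.py next to this file). As the
# tests exercise them: mock_opt minimizes "score", mock_opt_max maximizes it, and
# mock_opt_no_defaults does not start from the tunables' default configuration.
# A rough, commented-out sketch of what such a fixture might look like follows;
# the constructor config keys ("optimization_targets", "seed", "start_with_defaults")
# and the tunable_groups fixture are assumptions, not the actual conftest contents:
#
#   @pytest.fixture
#   def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
#       """Mock optimizer that minimizes the "score" metric."""
#       return MockOptimizer(
#           tunables=tunable_groups,
#           config={
#               "optimization_targets": {"score": "min"},
#               "seed": 42,
#               "start_with_defaults": True,
#           },
#       )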

@pytest.fixture
def mock_configurations_no_defaults() -> list:
    """A list of 2-tuples of (tunable_values, score) to test the optimizers."""
    return [
        (
            {
                "vmSize": "Standard_B4ms",
                "idle": "halt",
                "kernel_sched_migration_cost_ns": 13112,
                "kernel_sched_latency_ns": 796233790,
            },
            88.88,
        ),
        (
            {
                "vmSize": "Standard_B2ms",
                "idle": "halt",
                "kernel_sched_migration_cost_ns": 117026,
                "kernel_sched_latency_ns": 149827706,
            },
            66.66,
        ),
        (
            {
                "vmSize": "Standard_B4ms",
                "idle": "halt",
                "kernel_sched_migration_cost_ns": 354785,
                "kernel_sched_latency_ns": 795285932,
            },
            99.99,
        ),
    ]


@pytest.fixture
def mock_configurations(mock_configurations_no_defaults: list) -> list:
    """A list of 2-tuples of (tunable_values, score) to test the optimizers."""
    return [
        (
            {
                "vmSize": "Standard_B4ms",
                "idle": "halt",
                "kernel_sched_migration_cost_ns": -1,
                "kernel_sched_latency_ns": 2000000,
            },
            88.88,
        ),
    ] + mock_configurations_no_defaults

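# The extra leading entry in mock_configurations above appears to mirror the
# tunables' default values (e.g., kernel_sched_migration_cost_ns == -1), which an
# optimizer that starts with defaults will suggest first; the *_no_defaults fixture
# omits it for the optimizer that skips that initial default suggestion.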

def _optimize(mock_opt: MockOptimizer, mock_configurations: list) -> float:
    """Run several iterations of the optimizer and return the best score."""
    for tunable_values, score in mock_configurations:
        assert mock_opt.not_converged()
        tunables = mock_opt.suggest()
        assert tunables.get_param_values() == tunable_values
        mock_opt.register(tunables, Status.SUCCEEDED, {"score": score})

    (scores, _tunables) = mock_opt.get_best_observation()
    assert scores is not None
    assert len(scores) == 1
    return scores["score"]

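# Note that _optimize asserts exact equality between each suggestion and the
# expected tunable values, so it relies on MockOptimizer producing deterministic,
# seed-driven suggestions; the fixture values above were presumably captured from
# that fixed sequence.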

def test_mock_optimizer(mock_opt: MockOptimizer, mock_configurations: list) -> None:
    """Make sure that the mock optimizer produces consistent suggestions."""
    score = _optimize(mock_opt, mock_configurations)
    assert score == pytest.approx(66.66, 0.01)


def test_mock_optimizer_no_defaults(
    mock_opt_no_defaults: MockOptimizer,
    mock_configurations_no_defaults: list,
) -> None:
    """Make sure the mock optimizer produces consistent suggestions without defaults."""
    score = _optimize(mock_opt_no_defaults, mock_configurations_no_defaults)
    assert score == pytest.approx(66.66, 0.01)


def test_mock_optimizer_max(mock_opt_max: MockOptimizer, mock_configurations: list) -> None:
    """Check the maximization mode of the mock optimizer."""
    score = _optimize(mock_opt_max, mock_configurations)
    assert score == pytest.approx(99.99, 0.01)


def test_mock_optimizer_register_fail(mock_opt: MockOptimizer) -> None:
    """Check the input acceptance conditions for Optimizer.register()."""
    tunables = mock_opt.suggest()
    mock_opt.register(tunables, Status.SUCCEEDED, {"score": 10})
    mock_opt.register(tunables, Status.FAILED)
    with pytest.raises(ValueError):
        mock_opt.register(tunables, Status.SUCCEEDED, None)
    with pytest.raises(ValueError):
        mock_opt.register(tunables, Status.FAILED, {"score": 10})
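
# Summary of the register() contract exercised above: a SUCCEEDED trial must be
# registered with a score dict, and a FAILED trial must not; violating either
# rule raises ValueError.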