Coverage for mlos_bench/mlos_bench/tests/optimizers/conftest.py: 100%

31 statements  

« prev     ^ index     » next       coverage.py v7.6.1, created at 2024-10-07 01:52 +0000

1# 

2# Copyright (c) Microsoft Corporation. 

3# Licensed under the MIT License. 

4# 

5"""Test fixtures for mlos_bench optimizers.""" 

6 

7from typing import List 

8 

9import pytest 

10 

11from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer 

12from mlos_bench.optimizers.mock_optimizer import MockOptimizer 

13from mlos_bench.tests import SEED 

14from mlos_bench.tunables.tunable_groups import TunableGroups 

15 

16 

@pytest.fixture
def mock_configs() -> List[dict]:
    """Mock configurations of earlier experiments, as a fixture."""
    # All rows share the same four tunable keys; zip each value row with them.
    keys = (
        "vmSize",
        "idle",
        "kernel_sched_migration_cost_ns",
        "kernel_sched_latency_ns",
    )
    rows = [
        ("Standard_B4ms", "halt", 50000, 1000000),
        ("Standard_B4ms", "halt", 40000, 2000000),
        # -1 is a special value for kernel_sched_migration_cost_ns
        ("Standard_B4ms", "mwait", -1, 3000000),
        ("Standard_B2s", "mwait", 200000, 4000000),
    ]
    return [dict(zip(keys, row)) for row in rows]

46 

47 

@pytest.fixture
def mock_opt_no_defaults(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a MockOptimizer that skips the default configuration."""
    config = dict(
        optimization_targets={"score": "min"},
        max_suggestions=5,
        # Do not start the suggestion sequence from the tunables' defaults.
        start_with_defaults=False,
        seed=SEED,
    )
    return MockOptimizer(tunables=tunable_groups, service=None, config=config)

61 

62 

@pytest.fixture
def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a minimizing MockOptimizer."""
    config = dict(
        optimization_targets={"score": "min"},
        max_suggestions=5,
        seed=SEED,
    )
    return MockOptimizer(tunables=tunable_groups, service=None, config=config)

71 

72 

@pytest.fixture
def mock_opt_max(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a maximizing MockOptimizer."""
    config = dict(
        optimization_targets={"score": "max"},
        max_suggestions=10,
        seed=SEED,
    )
    return MockOptimizer(tunables=tunable_groups, service=None, config=config)

81 

82 

@pytest.fixture
def flaml_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core FLAML optimizer."""
    config = dict(
        optimization_targets={"score": "min"},
        max_suggestions=15,
        optimizer_type="FLAML",
        seed=SEED,
    )
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=config)

96 

97 

@pytest.fixture
def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core FLAML optimizer."""
    config = dict(
        optimization_targets={"score": "max"},
        max_suggestions=15,
        optimizer_type="FLAML",
        seed=SEED,
    )
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=config)

111 

112 

# FIXME: SMAC's RF model can be non-deterministic at low iterations, which are
# normally calculated as a percentage of the max_suggestions and number of
# tunable dimensions, so for now we set the initial random samples equal to the
# number of iterations and control them with a seed.

# Shared suggestion budget for the SMAC fixtures below; also reused as
# n_random_init so that every suggestion is a seeded random sample (see FIXME).
SMAC_ITERATIONS = 10

119 

120 

@pytest.fixture
def smac_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core SMAC optimizer."""
    config = dict(
        optimization_targets={"score": "min"},
        max_suggestions=SMAC_ITERATIONS,
        optimizer_type="SMAC",
        seed=SEED,
        output_directory=None,
        # See the FIXME note next to SMAC_ITERATIONS above.
        n_random_init=SMAC_ITERATIONS,
        max_ratio=1.0,
    )
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=config)

138 

139 

@pytest.fixture
def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core SMAC optimizer."""
    config = dict(
        optimization_targets={"score": "max"},
        max_suggestions=SMAC_ITERATIONS,
        optimizer_type="SMAC",
        seed=SEED,
        output_directory=None,
        # See the FIXME note next to SMAC_ITERATIONS above.
        n_random_init=SMAC_ITERATIONS,
        max_ratio=1.0,
    )
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=config)