Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion mlos_bench/mlos_bench/launcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def __init__(self, description: str, long_text: str = "", argv: Optional[List[st
else:
config = {}

self.trial_config_repeat_count = args.trial_config_repeat_count or config.get("trial_config_repeat_count", 1)
self.trial_config_repeat_count: int = args.trial_config_repeat_count or config.get("trial_config_repeat_count", 1)

log_level = args.log_level or config.get("log_level", _LOG_LEVEL)
try:
Expand Down Expand Up @@ -333,13 +333,17 @@ def _load_optimizer(self, args_optimizer: Optional[str]) -> Optimizer:
in the --optimizer command line option. If the config file is not specified,
create a one-shot optimizer to run a single benchmark trial.
"""
if 'max_iterations' in self.global_config:
self.global_config['max_iterations'] *= self.trial_config_repeat_count
if args_optimizer is None:
# global_config may contain additional properties, so we need to
# strip those out before instantiating the basic oneshot optimizer.
config = {key: val for key, val in self.global_config.items() if key in OneShotOptimizer.BASE_SUPPORTED_CONFIG_PROPS}
return OneShotOptimizer(
self.tunables, config=config, service=self._parent_service)
class_config = self._config_loader.load_config(args_optimizer, ConfigSchema.OPTIMIZER)
if 'max_iterations' in class_config:
class_config['max_iterations'] *= self.trial_config_repeat_count
assert isinstance(class_config, Dict)
optimizer = self._config_loader.build_optimizer(tunables=self.tunables,
service=self._parent_service,
Expand Down
10 changes: 10 additions & 0 deletions mlos_bench/mlos_bench/optimizers/base_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,16 @@ def __exit__(self, ex_type: Optional[Type[BaseException]],
self._in_context = False
return False # Do not suppress exceptions

@property
def max_iterations(self) -> int:
"""
The maximum number of iterations (trials) to run.

Note: this may or may not be the same as the number of configurations.
See Also: Launcher.trial_config_repeat_count.
"""
return self._max_iter

@property
def seed(self) -> int:
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,14 @@
"bar"
],

"optimizer": "optimizers/mlos_core_default_opt.jsonc",

"services": [
"services/remote/mock/mock_fileshare_service.jsonc"
],

"trial_config_repeat_count": 1,

"random_seed": 42,
"random_init": true
}
25 changes: 20 additions & 5 deletions mlos_bench/mlos_bench/tests/launcher_parse_args_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import pytest

from mlos_bench.launcher import Launcher
from mlos_bench.optimizers import MockOptimizer
from mlos_bench.optimizers import OneShotOptimizer, MlosCoreOptimizer
from mlos_bench.os_environ import environ
from mlos_bench.config.schemas import ConfigSchema
from mlos_bench.util import path_join
Expand Down Expand Up @@ -100,9 +100,11 @@ def test_launcher_args_parse_1(config_paths: List[str]) -> None:
env_config = launcher.config_loader.load_config(env_conf_path, ConfigSchema.ENVIRONMENT)
assert check_class_name(launcher.environment, env_config['class'])
# Check that the optimizer looks right.
assert isinstance(launcher.optimizer, MockOptimizer)
assert isinstance(launcher.optimizer, OneShotOptimizer)
# Check that the optimizer got initialized with defaults.
assert launcher.optimizer.tunable_params.is_defaults()
assert launcher.trial_config_repeat_count == 1 # default when left unspecified
assert launcher.optimizer.max_iterations == 1 # value for OneShotOptimizer


def test_launcher_args_parse_2(config_paths: List[str]) -> None:
Expand All @@ -120,15 +122,17 @@ def test_launcher_args_parse_2(config_paths: List[str]) -> None:
environ['USER'] = environ['USERNAME']

config_file = 'cli/test-cli-config.jsonc'
globals_file = 'globals/global_test_config.jsonc'
cli_args = ' '.join([f"--config-path {config_path}" for config_path in config_paths]) + \
f' --config {config_file}' + \
' --service services/remote/mock/mock_auth_service.jsonc' + \
' --service services/remote/mock/mock_remote_exec_service.jsonc' + \
' --globals globals/global_test_config.jsonc' + \
f' --globals {globals_file}' + \
' --experiment_id MockeryExperiment' + \
' --no-teardown' + \
' --random-init' + \
' --random-seed 1234'
' --random-seed 1234' + \
' --trial-config-repeat-count 3'
launcher = Launcher(description=__name__, argv=cli_args.split())
# Check that the parent service
assert isinstance(launcher.service, SupportsAuth)
Expand Down Expand Up @@ -157,7 +161,18 @@ def test_launcher_args_parse_2(config_paths: List[str]) -> None:
assert check_class_name(launcher.environment, env_config['class'])

# Check that the optimizer looks right.
assert isinstance(launcher.optimizer, MockOptimizer)
assert isinstance(launcher.optimizer, MlosCoreOptimizer)
opt_config_file = config['optimizer']
opt_config = launcher.config_loader.load_config(opt_config_file, ConfigSchema.OPTIMIZER)
globals_file_config = launcher.config_loader.load_config(globals_file, ConfigSchema.GLOBALS)
assert launcher.trial_config_repeat_count == 3
# The actual global_config gets overwritten as a part of processing, so to test
# this we read the original value out of the source files.
orig_max_iters = globals_file_config.get('max_iterations', opt_config.get('config', {}).get('max_iterations', 100))
assert launcher.optimizer.max_iterations \
== launcher.trial_config_repeat_count * orig_max_iters \
== launcher.global_config['max_iterations']

# Check that the optimizer got initialized with random values instead of the defaults.
# Note: the environment doesn't get updated until suggest() is called to
# return these values in run.py.
Expand Down