
Commit 412aaa5

[tune] Deprecate ambiguous function values (use tune.function / tune.sample_from instead) (#3457)

* wip

* exclude
ericl authored Dec 6, 2018
1 parent d864f29 commit 412aaa5
Showing 13 changed files with 96 additions and 39 deletions.
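
For orientation, a minimal before/after sketch of the migration this commit asks for (`postprocess_fn` is a hypothetical key; the `alpha` sampler mirrors the doc examples below):

```python
import numpy as np
from ray import tune

# Before (now deprecated): a bare lambda is ambiguous; Tune cannot tell
# whether to call it per trial or to pass it through as a literal value.
config_old = {"alpha": lambda spec: np.random.uniform(100)}

# After: wrap the callable so the intent is explicit.
config_new = {
    # Called during trial variant generation to draw a per-trial value.
    "alpha": tune.sample_from(lambda spec: np.random.uniform(100)),
    # Escaped: kept in the config as a literal function, never evaluated.
    "postprocess_fn": tune.function(lambda result: result),
}
```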
2 changes: 1 addition & 1 deletion doc/source/tune-package-ref.rst
@@ -29,7 +29,7 @@ ray.tune.suggest

.. automodule:: ray.tune.suggest
    :members:
-    :exclude-members: function, grid_search, SuggestionAlgorithm
+    :exclude-members: function, sample_from, grid_search, SuggestionAlgorithm
    :show-inheritance:

.. autoclass:: ray.tune.suggest.SuggestionAlgorithm
10 changes: 5 additions & 5 deletions doc/source/tune-usage.rst
@@ -141,8 +141,8 @@ The following shows grid search over two nested parameters combined with random
"my_experiment_name": {
"run": my_trainable,
"config": {
"alpha": lambda spec: np.random.uniform(100),
"beta": lambda spec: spec.config.alpha * np.random.normal(),
"alpha": tune.sample_from(lambda spec: np.random.uniform(100)),
"beta": tune.sample_from(lambda spec: spec.config.alpha * np.random.normal()),
"nn_layers": [
tune.grid_search([16, 64, 256]),
tune.grid_search([16, 64, 256]),
@@ -153,7 +153,7 @@ The following shows grid search over two nested parameters combined with random
.. note::
-    Lambda functions will be evaluated during trial variant generation. If you need to pass a literal function in your config, use ``tune.function(...)`` to escape it.
+    Use ``tune.sample_from(...)`` to sample from a function during trial variant generation. If you need to pass a literal function in your config, use ``tune.function(...)`` to escape it.

For more information on variant generation, see `basic_variant.py <https://github.com/ray-project/ray/blob/master/python/ray/tune/suggest/basic_variant.py>`__.
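
To make the note above concrete, a small sketch (`loss_fn` is a hypothetical key; the dependent `beta` sampler mirrors the example above):

```python
import random
from ray import tune

config = {
    # Sampled once per trial during variant generation.
    "alpha": tune.sample_from(lambda spec: random.uniform(0, 100)),
    # May depend on values already resolved for this trial via `spec`.
    "beta": tune.sample_from(lambda spec: spec.config.alpha * 0.1),
    # Escaped: the lambda itself is the config value; Tune never calls it.
    "loss_fn": tune.function(lambda y_true, y_pred: (y_true - y_pred) ** 2),
}
```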

@@ -169,8 +169,8 @@ By default, each random variable and grid search point is sampled once. To take
"my_experiment_name": {
"run": my_trainable,
"config": {
"alpha": lambda spec: np.random.uniform(100),
"beta": lambda spec: spec.config.alpha * np.random.normal(),
"alpha": tune.sample_from(lambda spec: np.random.uniform(100)),
"beta": tune.sample_from(lambda spec: spec.config.alpha * np.random.normal()),
"nn_layers": [
tune.grid_search([16, 64, 256]),
tune.grid_search([16, 64, 256]),
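
The hunk above is from the docs' repeat-sampling section. As a sketch of the mechanism (stub trainable assumed), `num_samples` repeats the full variant grid, and each `tune.sample_from` value is redrawn on every repetition, matching its use in the hyperband examples later in this commit:

```python
import numpy as np
from ray import tune
from ray.tune import run_experiments

def my_trainable(config, reporter):
    # Stub standing in for the docs' placeholder trainable.
    reporter(timesteps_total=1, mean_accuracy=config["alpha"])

run_experiments({
    "my_experiment_name": {
        "run": my_trainable,
        # Repeat every grid-search variant 10 times; "alpha" is
        # drawn fresh for each repetition.
        "num_samples": 10,
        "config": {
            "alpha": tune.sample_from(lambda spec: np.random.uniform(100)),
            "nn_layers": [tune.grid_search([16, 64, 256])],
        },
    }
})
```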
13 changes: 10 additions & 3 deletions python/ray/tune/__init__.py
@@ -7,9 +7,16 @@
from ray.tune.experiment import Experiment
from ray.tune.registry import register_env, register_trainable
from ray.tune.trainable import Trainable
-from ray.tune.suggest import grid_search, function
+from ray.tune.suggest import grid_search, function, sample_from

__all__ = [
-    "Trainable", "TuneError", "grid_search", "register_env",
-    "register_trainable", "run_experiments", "Experiment", "function"
+    "Trainable",
+    "TuneError",
+    "grid_search",
+    "register_env",
+    "register_trainable",
+    "run_experiments",
+    "Experiment",
+    "function",
+    "sample_from",
]
8 changes: 5 additions & 3 deletions python/ray/tune/examples/async_hyperband_example.py
@@ -12,7 +12,7 @@
import numpy as np

import ray
-from ray.tune import Trainable, run_experiments
+from ray.tune import Trainable, run_experiments, sample_from
from ray.tune.schedulers import AsyncHyperBandScheduler


@@ -76,8 +76,10 @@ def _restore(self, checkpoint_path):
"gpu": 0
},
"config": {
"width": lambda spec: 10 + int(90 * random.random()),
"height": lambda spec: int(100 * random.random()),
"width": sample_from(
lambda spec: 10 + int(90 * random.random())),
"height": sample_from(
lambda spec: int(100 * random.random())),
},
}
},
6 changes: 3 additions & 3 deletions python/ray/tune/examples/hyperband_example.py
@@ -12,7 +12,7 @@
import numpy as np

import ray
-from ray.tune import Trainable, run_experiments, Experiment
+from ray.tune import Trainable, run_experiments, Experiment, sample_from
from ray.tune.schedulers import HyperBandScheduler


@@ -67,8 +67,8 @@ def _restore(self, checkpoint_path):
        num_samples=20,
        stop={"training_iteration": 1 if args.smoke_test else 99999},
        config={
-            "width": lambda spec: 10 + int(90 * random.random()),
-            "height": lambda spec: int(100 * random.random())
+            "width": sample_from(lambda spec: 10 + int(90 * random.random())),
+            "height": sample_from(lambda spec: int(100 * random.random()))
        })

run_experiments(exp, scheduler=hyperband)
6 changes: 4 additions & 2 deletions python/ray/tune/examples/mnist_pytorch.py
@@ -182,8 +182,10 @@ def test():
"run": "train_mnist",
"num_samples": 1 if args.smoke_test else 10,
"config": {
"lr": lambda spec: np.random.uniform(0.001, 0.1),
"momentum": lambda spec: np.random.uniform(0.1, 0.9),
"lr": tune.sample_from(
lambda spec: np.random.uniform(0.001, 0.1)),
"momentum": tune.sample_from(
lambda spec: np.random.uniform(0.1, 0.9)),
}
}
},
6 changes: 4 additions & 2 deletions python/ray/tune/examples/mnist_pytorch_trainable.py
@@ -195,8 +195,10 @@ def _restore(self, checkpoint_path):
"checkpoint_at_end": True,
"config": {
"args": args,
"lr": lambda spec: np.random.uniform(0.001, 0.1),
"momentum": lambda spec: np.random.uniform(0.1, 0.9),
"lr": tune.sample_from(
lambda spec: np.random.uniform(0.001, 0.1)),
"momentum": tune.sample_from(
lambda spec: np.random.uniform(0.1, 0.9)),
}
}
},
14 changes: 7 additions & 7 deletions python/ray/tune/examples/pbt_ppo_example.py
@@ -13,7 +13,7 @@
import random

import ray
-from ray.tune import run_experiments
+from ray.tune import run_experiments, sample_from
from ray.tune.schedulers import PopulationBasedTraining

if __name__ == "__main__":
@@ -63,12 +63,12 @@ def explore(config):
"clip_param": 0.2,
"lr": 1e-4,
# These params start off randomly drawn from a set.
"num_sgd_iter":
lambda spec: random.choice([10, 20, 30]),
"sgd_minibatch_size":
lambda spec: random.choice([128, 512, 2048]),
"train_batch_size":
lambda spec: random.choice([10000, 20000, 40000])
"num_sgd_iter": sample_from(
lambda spec: random.choice([10, 20, 30])),
"sgd_minibatch_size": sample_from(
lambda spec: random.choice([128, 512, 2048])),
"train_batch_size": sample_from(
lambda spec: random.choice([10000, 20000, 40000]))
},
},
},
4 changes: 2 additions & 2 deletions python/ray/tune/examples/pbt_tune_cifar10_with_keras.py
@@ -23,7 +23,7 @@
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator

import ray
-from ray.tune import grid_search, run_experiments
+from ray.tune import grid_search, run_experiments, sample_from
from ray.tune import Trainable
from ray.tune.schedulers import PopulationBasedTraining

@@ -193,7 +193,7 @@ def _stop(self):
"epochs": 1,
"batch_size": 64,
"lr": grid_search([10**-4, 10**-5]),
"decay": lambda spec: spec.config.lr / 100.0,
"decay": sample_from(lambda spec: spec.config.lr / 100.0),
"dropout": grid_search([0.25, 0.5]),
},
"num_samples": 4,
12 changes: 8 additions & 4 deletions python/ray/tune/examples/tune_mnist_keras.py
@@ -192,10 +192,14 @@ def create_parser():
"gpu": 0.5 if args.use_gpu else 0
},
"config": {
"lr": lambda spec: np.random.uniform(0.001, 0.1),
"momentum": lambda spec: np.random.uniform(0.1, 0.9),
"hidden": lambda spec: np.random.randint(32, 512),
"dropout1": lambda spec: np.random.uniform(0.2, 0.8),
"lr": tune.sample_from(
lambda spec: np.random.uniform(0.001, 0.1)),
"momentum": tune.sample_from(
lambda spec: np.random.uniform(0.1, 0.9)),
"hidden": tune.sample_from(
lambda spec: np.random.randint(32, 512)),
"dropout1": tune.sample_from(
lambda spec: np.random.uniform(0.2, 0.8)),
}
}
},
5 changes: 3 additions & 2 deletions python/ray/tune/examples/tune_mnist_ray_hyperband.py
@@ -31,7 +31,7 @@

import ray
from ray.tune import grid_search, run_experiments, register_trainable, \
-    Trainable
+    Trainable, sample_from
from ray.tune.schedulers import HyperBandScheduler
from tensorflow.examples.tutorials.mnist import input_data

@@ -221,7 +221,8 @@ def _restore(self, path):
            'time_total_s': 600,
        },
        'config': {
-            'learning_rate': lambda spec: 10**np.random.uniform(-5, -3),
+            'learning_rate': sample_from(
+                lambda spec: 10**np.random.uniform(-5, -3)),
            'activation': grid_search(['relu', 'elu', 'tanh']),
        },
        "num_samples": 10,
12 changes: 9 additions & 3 deletions python/ray/tune/suggest/__init__.py
@@ -2,9 +2,15 @@
from ray.tune.suggest.basic_variant import BasicVariantGenerator
from ray.tune.suggest.suggestion import SuggestionAlgorithm
from ray.tune.suggest.hyperopt import HyperOptSearch
-from ray.tune.suggest.variant_generator import grid_search, function
+from ray.tune.suggest.variant_generator import grid_search, function, \
+    sample_from

__all__ = [
-    "SearchAlgorithm", "BasicVariantGenerator", "HyperOptSearch",
-    "SuggestionAlgorithm", "grid_search", "function"
+    "SearchAlgorithm",
+    "BasicVariantGenerator",
+    "HyperOptSearch",
+    "SuggestionAlgorithm",
+    "grid_search",
+    "function",
+    "sample_from",
]
37 changes: 35 additions & 2 deletions python/ray/tune/suggest/variant_generator.py
@@ -3,12 +3,15 @@
from __future__ import print_function

import copy
+import logging
import numpy
import random
import types

from ray.tune import TuneError

+logger = logging.getLogger(__name__)
+

def generate_variants(unresolved_spec):
"""Generates variants from a spec (dict) with unresolved values.
@@ -55,8 +58,29 @@ def grid_search(values):
return {"grid_search": values}


class sample_from(object):
"""Specify that tune should sample configuration values from this function.
The use of function arguments in tune configs must be disambiguated by
either wrapped the function in tune.eval() or tune.function().
Arguments:
func: An callable function to draw a sample from.
"""

def __init__(self, func):
self.func = func


class function(object):
"""Wraps `func` to make sure it is not expanded during resolution."""
"""Wraps `func` to make sure it is not expanded during resolution.
The use of function arguments in tune configs must be disambiguated by
either wrapped the function in tune.eval() or tune.function().
Arguments:
func: A function literal.
"""

def __init__(self, func):
self.func = func
@@ -203,8 +227,17 @@ def _is_resolved(v):

def _try_resolve(v):
    if isinstance(v, types.FunctionType):
-        # Lambda function
+        logger.warning(
+            "Deprecation warning: Function values are ambiguous in Tune "
+            "configurations. Either wrap the function with "
+            "`tune.function(func)` to specify a function literal, or "
+            "`tune.sample_from(func)` to tell Tune to "
+            "sample values from the function during variant generation: "
+            "{}".format(v))
        return False, v
+    elif isinstance(v, sample_from):
+        # Function to sample from
+        return False, v.func
    elif isinstance(v, dict) and len(v) == 1 and "eval" in v:
        # Lambda function in eval syntax
        return False, lambda spec: eval(
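
Reading the new `_try_resolve` branches together, a sketch of how each config-value style is treated after this change (`activation_fn` is a hypothetical key; the eval-dict behavior is inferred from the truncated branch above):

```python
import random
from ray import tune

config = {
    # Bare lambda (types.FunctionType): still resolved as before, but now
    # logs the deprecation warning added in this diff.
    "legacy": lambda spec: random.random(),
    # sample_from: unwrapped to .func and evaluated during variant generation.
    "width": tune.sample_from(lambda spec: 10 + int(90 * random.random())),
    # function: escaped, so the callable is kept as a literal config value.
    "activation_fn": tune.function(lambda x: max(x, 0.0)),
    # {"eval": ...} syntax: the string is evaluated with `spec` in scope.
    "height": {"eval": "spec.config.width * 2"},
}
```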
