ppo #1514

2 changes: 1 addition & 1 deletion nevergrad/functions/pyomo/core.py
@@ -154,7 +154,7 @@ def _pyomo_value_assignment(self, k_model_variables: tp.Dict[str, tp.Any]) -> None:
code_str = ""
for k in k_model_variables:
code_str += f"self._model_instance.{k} = k_model_variables['{k}']\n"
-self._value_assignment_code_obj = compile(code_str, "<string>", "exec")
+self._value_assignment_code_obj = compile(code_str, "<string>", "exec")  # type: ignore
# TODO find a way to avoid exec
exec(self._value_assignment_code_obj) # pylint: disable=exec-used
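The TODO above asks for a way to avoid exec. As a hedged sketch (not part of this diff; _assign_model_value is a hypothetical helper name): when each key in k_model_variables is a plain dotted attribute path on the model instance, the generated-string-plus-exec pattern can be replaced with getattr/setattr. Keys addressing indexed Pyomo components (e.g. names containing brackets) would still need separate handling, which may be why exec is used here.

def _assign_model_value(model_instance, dotted_name, value):
    # Walk "a.b.c" down to the parent object, then set the final attribute.
    parts = dotted_name.split(".")
    target = model_instance
    for part in parts[:-1]:
        target = getattr(target, part)
    setattr(target, parts[-1], value)

# Usage sketch replacing the exec-based assignment:
# for k, v in k_model_variables.items():
#     _assign_model_value(self._model_instance, k, v)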

4 changes: 4 additions & 0 deletions nevergrad/optimization/experimentalvariants.py
@@ -55,7 +55,11 @@
"MetaModelFmin2", register=True
)
MetaModelFmin2.no_parallelization = True
LSCMA = ParametrizedCMA(high_speed=False).set_name("LSCMA", register=True)
HSCMA = ParametrizedCMA(high_speed=True).set_name("HSCMA", register=True)
HSNeuralCMA = ParametrizedCMA(high_speed=True, algorithm="neural").set_name("HSNeuralCMA", register=True)
HSSVMCMA = ParametrizedCMA(high_speed=True, algorithm="svm").set_name("HSSVMCMA", register=True)
HSRFCMA = ParametrizedCMA(high_speed=True, algorithm="rf").set_name("HSRFCMA", register=True)
HSMetaModel = ParametrizedMetaModel(multivariate_optimizer=HSCMA).set_name("HSMetaModel", register=True)

# OnePlusOne
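As a usage sketch (hedged; it assumes a nevergrad build that includes the registrations above, since experimental variants are only registered once their module is imported), the new high-speed variants can be looked up by name through the optimizer registry:

import nevergrad as ng
from nevergrad.optimization import experimentalvariants  # noqa: F401  # triggers registration

# Pick one of the newly registered high-speed CMA variants and run it on a toy quadratic.
optimizer = ng.optimizers.registry["HSRFCMA"](parametrization=5, budget=200)
recommendation = optimizer.minimize(lambda x: float(sum((x - 0.5) ** 2)))
print(recommendation.value)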
31 changes: 23 additions & 8 deletions nevergrad/optimization/metamodel.py
@@ -14,7 +14,9 @@ class MetaModelFailure(ValueError):
"""Sometimes the optimum of the metamodel is at infinity."""


-def learn_on_k_best(archive: utils.Archive[utils.MultiValue], k: int) -> tp.ArrayLike:
+def learn_on_k_best(
+    archive: utils.Archive[utils.MultiValue], k: int, algorithm: str = "quad"
+) -> tp.ArrayLike:
"""Approximate optimum learnt from the k best.

Parameters
@@ -34,21 +36,34 @@ def learn_on_k_best(archive: utils.Archive[utils.MultiValue], k: int) -> tp.ArrayLike:
y = np.asarray([archive[c[0]].get_estimation("pessimistic") for c in first_k_individuals])
X = np.asarray([(c[0] - middle) / normalization for c in first_k_individuals])

# We need SKLearn.
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

polynomial_features = PolynomialFeatures(degree=2)
X2 = polynomial_features.fit_transform(X)

# Fit a linear model.
if not max(y) - min(y) > 1e-20: # better use "not" for dealing with nans
raise MetaModelFailure

y = (y - min(y)) / (max(y) - min(y))
model = LinearRegression()
model.fit(X2, y)
if algorithm == "neural":
from sklearn.neural_network import MLPRegressor

model = MLPRegressor(hidden_layer_sizes=(16, 16), solver="lbfgs")
elif algorithm in ["svm", "svr"]:
from sklearn.svm import SVR

model = SVR()
elif algorithm == "rf":
from sklearn.ensemble import RandomForestRegressor

model = RandomForestRegressor()
else:
assert algorithm == "quad", f"Metamodelling algorithm {algorithm} not recognized."
# We need SKLearn.
from sklearn.linear_model import LinearRegression

# Fit a linear model.
model = LinearRegression()

model.fit(X2, y)
# Check model quality.
model_outputs = model.predict(X2)
indices = np.argsort(y)
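To make the new branching concrete, here is a standalone sketch with made-up data (make_surrogate is a hypothetical helper, not the library code): each algorithm name selects a scikit-learn regressor, which is then fit on degree-2 polynomial features of the points, with losses normalized to [0, 1] as in learn_on_k_best above.

import numpy as np
from sklearn.preprocessing import PolynomialFeatures

def make_surrogate(algorithm: str = "quad"):
    # Map the algorithm name to a scikit-learn regressor, mirroring the branches in the diff.
    if algorithm == "neural":
        from sklearn.neural_network import MLPRegressor
        return MLPRegressor(hidden_layer_sizes=(16, 16), solver="lbfgs")
    if algorithm in ("svm", "svr"):
        from sklearn.svm import SVR
        return SVR()
    if algorithm == "rf":
        from sklearn.ensemble import RandomForestRegressor
        return RandomForestRegressor()
    from sklearn.linear_model import LinearRegression
    return LinearRegression()  # the "quad" case: linear model on quadratic features

# Toy stand-ins for the k best points and their losses.
rng = np.random.default_rng(0)
X = rng.normal(size=(30, 2))
y = ((X - 0.3) ** 2).sum(axis=1)

X2 = PolynomialFeatures(degree=2).fit_transform(X)
y = (y - y.min()) / (y.max() - y.min())  # normalize losses as in learn_on_k_best

model = make_surrogate("rf")
model.fit(X2, y)
print(model.predict(X2[:3]))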