Skip to content

Commit

Permalink
Always compute gradients in transformed (0,1) space.
Browse files · Browse the repository at this point in the history
  • Loading branch information
rolfjl committed Sep 1, 2023
1 parent 8d11f72 commit c98ea01
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions popt/update_schemes/enopt.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def calc_update(self, iteration, logger=None):
aug_state = ot.aug_optim_state(current_state, list_states)

# Compute the steepest ascent step. Scale the gradient with 2-norm (or inf-norm: np.inf)
normalize = np.maximum(la.norm(self.sens_matrix, np.inf), 1e-6)
normalize = np.maximum(la.norm(self.sens_matrix, np.inf), 1e-12)
H = 1
if self.hessian:
H = 1 / np.diag(self.cov_sens_matrix)
Expand Down Expand Up @@ -130,7 +130,7 @@ def calc_update(self, iteration, logger=None):
self.step = new_step

# Update covariance (currently we don't apply backtracking for alpha_cov)
normalize = np.maximum(la.norm(self.cov_sens_matrix, np.inf), 1e-6)
normalize = np.maximum(la.norm(self.cov_sens_matrix, np.inf), 1e-12)
self.cov_step = self.alpha_cov * self.cov_sens_matrix / normalize + beta * self.cov_step
self.cov = self.cov + self.cov_step
self.cov = self.get_sym_pos_semidef(self.cov)
Expand Down Expand Up @@ -266,7 +266,7 @@ def _ext_enopt_param(self):

# Check if Hessian should be used
ind_hessian = bt.index2d(enopt, 'hessian')
if ind_hessian is None: # num_models does not exist
if ind_hessian is None: # do not use Hessian
self.hessian = None
else: # use Hessian
self.hessian = True
Expand Down Expand Up @@ -316,6 +316,7 @@ def calc_ensemble_sensitivity(self):
self.state = self._gen_state_ensemble()
self._invert_scale_state()
self.calc_prediction()
self._scale_state()
self.num_func_eval += self.ne
obj_func_values = self.obj_func(
self.pred_data, self.keys_opt, self.sim.true_order)
Expand Down

0 comments on commit c98ea01

Please sign in to comment.