
Commit

policy init bug fix
nikhilbarhate99 authored Sep 26, 2019
1 parent d620d75 commit 64376aa
Showing 1 changed file with 2 additions and 0 deletions.
PPO_continuous.py: 2 additions & 0 deletions
@@ -84,7 +84,9 @@ def __init__(self, state_dim, action_dim, action_std, lr, betas, gamma, K_epochs

self.policy = ActorCritic(state_dim, action_dim, action_std).to(device)
self.optimizer = torch.optim.Adam(self.policy.parameters(), lr=lr, betas=betas)

self.policy_old = ActorCritic(state_dim, action_dim, action_std).to(device)
self.policy_old.load_state_dict(self.policy.state_dict())

self.MseLoss = nn.MSELoss()

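Context for the change above: after this commit, self.policy_old is synced to self.policy via load_state_dict right after construction, so both ActorCritic copies start from identical weights. The sketch below is a minimal, illustrative stand-in (not the repository's ActorCritic) showing the pattern and why it matters: if the two copies are left independently initialized, the first PPO importance-sampling ratio pi(a|s) / pi_old(a|s) compares two unrelated random policies.

# Minimal sketch of the pattern (assumes PyTorch; the tiny network below is
# illustrative and stands in for the repository's ActorCritic).
import torch
import torch.nn as nn

state_dim, action_dim = 4, 2

def make_policy():
    # Stand-in actor: maps a state to action means.
    return nn.Sequential(nn.Linear(state_dim, 64), nn.Tanh(), nn.Linear(64, action_dim))

policy = make_policy()
policy_old = make_policy()  # independently (randomly) initialized

# The fix: copy the current policy's weights into the old policy, so the
# first ratio pi(a|s) / pi_old(a|s) is computed against matching weights.
policy_old.load_state_dict(policy.state_dict())

with torch.no_grad():
    s = torch.randn(1, state_dim)
    assert torch.allclose(policy(s), policy_old(s))  # identical outputs after the sync

Without the load_state_dict call, the final assert would almost surely fail, which is the initialization issue the commit message refers to.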

0 comments on commit 64376aa
