3 files changed: +11 −1 lines changed

configs/lunarlander_continuous_v2

@@ -27,6 +27,7 @@ learner_cfg:
     type: "GaussianDist"
     configs:
       hidden_sizes: [256, 256]
+      hidden_activation: "tanh"
       output_activation: "identity"
       fixed_logstd: True
   critic:
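For reference, a minimal sketch of how the new key travels from the YAML into the actor's configs dict. The loading code is illustrative, not the project's actual loader, and the exact nesting around learner_cfg is an assumption inferred from the hunk context (the diff only shows a sibling critic section):

    import yaml  # pyyaml

    cfg_text = """
    learner_cfg:
      actor:
        type: "GaussianDist"
        configs:
          hidden_sizes: [256, 256]
          hidden_activation: "tanh"
          output_activation: "identity"
          fixed_logstd: True
    """
    # The "actor" level is an assumption; the diff only shows that the changed
    # block sits under learner_cfg next to a "critic" section.
    cfg = yaml.safe_load(cfg_text)
    actor_configs = cfg["learner_cfg"]["actor"]["configs"]
    print(actor_configs["hidden_activation"])  # -> tanh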
@@ -28,6 +28,11 @@ def identity(x: torch.Tensor) -> torch.Tensor:
     return x


+def relu(x: torch.Tensor) -> torch.Tensor:
+    """Return torch.relu(x)."""
+    return torch.relu(x)
+
+
 def soft_update(local: nn.Module, target: nn.Module, tau: float):
     """Soft-update: target = tau*local + (1-tau)*target."""
     for t_param, l_param in zip(target.parameters(), local.parameters()):
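The new wrapper mirrors the existing identity helper, so the activation can be looked up by its config string via getattr (see the next diff). A standalone check of the function as added here:

    import torch

    def relu(x: torch.Tensor) -> torch.Tensor:
        """Return torch.relu(x)."""
        return torch.relu(x)

    # Negative inputs are clamped to zero, positives pass through.
    print(relu(torch.tensor([-1.0, 0.0, 2.5])))  # tensor([0.0000, 0.0000, 2.5000])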
@@ -61,7 +61,11 @@ def __init__(
         self.hidden_sizes = configs.hidden_sizes
         self.input_size = configs.input_size
         self.output_size = configs.output_size
-        self.hidden_activation = hidden_activation
+        self.hidden_activation = (
+            getattr(helper_functions, configs.hidden_activation)
+            if "hidden_activation" in configs.keys()
+            else hidden_activation
+        )
         self.output_activation = getattr(helper_functions, configs.output_activation)
         self.linear_layer = linear_layer
         self.use_output_layer = use_output_layer
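A minimal sketch of the fallback this hunk introduces: the constructor keeps its hidden_activation argument as the default and only overrides it when the config names an activation. The _Helpers class below is a stand-in for the project's helper_functions module, and configs is a plain dict here (the project uses attribute-style access); only the conditional mirrors the diff:

    import torch

    class _Helpers:
        """Stand-in for the project's helper_functions module."""
        @staticmethod
        def tanh(x: torch.Tensor) -> torch.Tensor:
            return torch.tanh(x)

    helper_functions = _Helpers()

    def resolve_hidden_activation(configs: dict, hidden_activation):
        # Mirrors the diff: prefer the name given in configs, else the argument.
        return (
            getattr(helper_functions, configs["hidden_activation"])
            if "hidden_activation" in configs.keys()
            else hidden_activation
        )

    print(resolve_hidden_activation({"hidden_activation": "tanh"}, torch.relu))
    print(resolve_hidden_activation({}, torch.relu))  # falls back to torch.relu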