pytorch_nets.py
import torch
import torch.nn as nn


class TanhMultiplier(nn.Module):
    """Scaled tanh output layer with a learnable multiplier."""

    def __init__(self):
        super(TanhMultiplier, self).__init__()
        self.multiplier = nn.Parameter(torch.ones(1))

    def forward(self, inputs):
        # Squash the inputs with tanh, then rescale by the exponentiated
        # learnable multiplier so the output range can adapt during training.
        exp_multiplier = torch.exp(self.multiplier)
        return torch.tanh(inputs / exp_multiplier) * exp_multiplier


class ForwardModel(nn.Module):

    def __init__(
        self,
        input_shape,
        activations=("relu", "relu"),
        hidden_size=2048,
        final_tanh=False,
    ):
        """Neural network used as a surrogate of the objective function."""
        super(ForwardModel, self).__init__()
        # Input projection, followed by one hidden block per requested activation.
        layers = [nn.Linear(input_shape, hidden_size)]
        for act in activations:
            if act == "leaky_relu":
                layers.extend([nn.Linear(hidden_size, hidden_size), nn.LeakyReLU()])
            elif isinstance(act, str):
                # Any other activation name defaults to ReLU.
                layers.extend([nn.Linear(hidden_size, hidden_size), nn.ReLU()])
            else:
                # Otherwise assume a ready-made nn.Module instance was passed in.
                layers.extend([nn.Linear(hidden_size, hidden_size), act])
        # Scalar output head, optionally followed by the scaled tanh.
        layers.append(nn.Linear(hidden_size, 1))
        if final_tanh:
            layers.append(TanhMultiplier())
        self.model = nn.Sequential(*layers)

    def forward(self, inputs):
        return self.model(inputs)
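

# Minimal usage sketch (not part of the original file): builds a surrogate for
# 60-dimensional inputs and scores a random batch. The input dimension, the
# activation choices, the smaller hidden_size, and final_tanh=True below are
# illustrative assumptions, not values prescribed by this module.
if __name__ == "__main__":
    model = ForwardModel(
        input_shape=60,
        activations=("relu", "leaky_relu"),
        hidden_size=256,
        final_tanh=True,
    )
    x = torch.randn(4, 60)   # batch of 4 candidate designs
    scores = model(x)        # shape (4, 1): one predicted objective value each
    print(scores.shape)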