forked from devendrachaplot/Neural-SLAM
-
Notifications
You must be signed in to change notification settings - Fork 0
/
optimization.py
65 lines (59 loc) · 2.24 KB
/
optimization.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
import inspect
import re
from torch import optim
def get_optimizer(parameters, s):
    """
    Parse an optimizer specification string and build a torch optimizer.

    Input should be of the form:
        - "sgd,lr=0.01"
        - "adagrad,lr=0.1,lr_decay=0.05"

    Args:
        parameters: iterable of model parameters to optimize (passed
            straight through to the ``torch.optim`` constructor).
        s: specification string ``"<method>[,key=value,...]"``; every
            value must be a plain (optionally signed) decimal number.

    Returns:
        A configured optimizer instance from ``torch.optim``.

    Raises:
        Exception: if the method name is unknown, or if a parsed keyword
            is not accepted by the chosen optimizer's ``__init__``.
        AssertionError: if a ``key=value`` pair is malformed, a value is
            not numeric, or ``sgd`` is requested without an ``lr``.
    """
    # Split the method name from its comma-separated key=value options.
    if "," in s:
        method = s[:s.find(',')]
        optim_params = {}
        # Everything after the first comma, e.g. ["lr=0.1", "lr_decay=0.05"].
        for x in s[s.find(',') + 1:].split(','):
            split = x.split('=')
            assert len(split) == 2
            # Values must be plain signed decimal numbers (e.g. 0.1, -3, .5).
            assert re.match(r"^[+-]?(\d+(\.\d*)?|\.\d+)$", split[1]) is not None
            optim_params[split[0]] = float(split[1])
    else:
        method = s
        optim_params = {}
    if method == 'adadelta':
        optim_fn = optim.Adadelta
    elif method == 'adagrad':
        optim_fn = optim.Adagrad
    elif method == 'adam':
        optim_fn = optim.Adam
        # Adam takes a single ``betas`` tuple; fold the user-facing
        # beta1/beta2 keys into it (beta1 defaults to 0.5 here, not the
        # torch default of 0.9 — kept for backward compatibility).
        optim_params['betas'] = (optim_params.get('beta1', 0.5),
                                 optim_params.get('beta2', 0.999))
        # Drop the scalar keys so only ``betas`` reaches the constructor.
        optim_params.pop('beta1', None)
        optim_params.pop('beta2', None)
    elif method == 'adamax':
        optim_fn = optim.Adamax
    elif method == 'asgd':
        optim_fn = optim.ASGD
    elif method == 'rmsprop':
        optim_fn = optim.RMSprop
    elif method == 'rprop':
        optim_fn = optim.Rprop
    elif method == 'sgd':
        optim_fn = optim.SGD
        # SGD has no default learning rate; require it explicitly.
        assert 'lr' in optim_params
    else:
        raise Exception('Unknown optimization method: "%s"' % method)
    # Check that every parsed keyword is accepted by the optimizer.
    # NOTE: inspect.getargspec was removed in Python 3.11;
    # getfullargspec returns the same positional-args list at index 0.
    expected_args = inspect.getfullargspec(optim_fn.__init__)[0]
    assert expected_args[:2] == ['self', 'params']
    if not all(k in expected_args[2:] for k in optim_params.keys()):
        raise Exception('Unexpected parameters: expected "%s", got "%s"' % (
            str(expected_args[2:]), str(optim_params.keys())))
    return optim_fn(parameters, **optim_params)