@@ -14,9 +14,11 @@
 
 import tempfile
 import unittest
+from functools import partial
 
 import numpy as np
 import paddle
+import paddle.optimizer
 import paddle.static
 from paddle.fluid.tests.unittests.ipu.op_test_ipu import IPUOpTest
 
@@ -28,7 +30,8 @@ def setUp(self):
         self.set_atol()
         self.set_data_feed()
         self.set_feed_attr()
-        self.set_op_attrs()
+        self.set_attrs()
+        self.set_optimizer()
 
     def set_data_feed(self):
         data = np.random.uniform(size=[1, 3, 10, 10])
@@ -39,15 +42,16 @@ def set_feed_attr(self):
         self.feed_shape = [x.shape for x in self.feed_fp32.values()]
         self.feed_list = list(self.feed_fp32.keys())
 
-    def set_op_attrs(self):
+    def set_attrs(self):
         self.attrs = {}
         self.attrs['steps'] = 100
         self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'sgd'
         self.attrs['enable_fp16'] = False
         self.attrs['model_path'] = tempfile.TemporaryDirectory()
 
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.SGD, learning_rate=1e-1)
+
     def _test_base(self, save_otherwise_load):
         scope = paddle.static.Scope()
         main_prog = paddle.static.Program()
@@ -71,16 +75,8 @@ def _test_base(self, save_otherwise_load):
                         name='conv2d')
                     loss = paddle.mean(conv1)
 
-                    if self.attrs['is_training']:
-                        if self.attrs['opt_type'] == 'sgd':
-                            sgd = paddle.optimizer.SGD(learning_rate=1e-2)
-                            sgd.minimize(loss)
-                        elif self.attrs['opt_type'] == 'adam':
-                            adam = paddle.optimizer.Adam(learning_rate=1e-2)
-                            adam.minimize(loss)
-                        elif self.attrs['opt_type'] == 'lamb':
-                            lamb = paddle.optimizer.Lamb(learning_rate=1e-2)
-                            lamb.minimize(loss)
+                    # apply optimizer
+                    self.optimizer().minimize(loss)
                     fetch_list = [loss.name]
 
         place = paddle.IPUPlace()
@@ -91,8 +87,7 @@ def _test_base(self, save_otherwise_load):
             paddle.static.load(main_prog, self.attrs['model_path'].name)
 
         ipu_strategy = paddle.static.IpuStrategy()
-        ipu_strategy.set_graph_config(
-            is_training=self.attrs['is_training'])
+        ipu_strategy.set_graph_config(is_training=True)
         ipu_strategy.set_precision_config(
             enable_fp16=self.attrs['enable_fp16'])
         ipu_program = paddle.static.IpuCompiledProgram(
@@ -131,62 +126,109 @@ def test_base(self):
         self.attrs['model_path'].cleanup()
 
 
+class TestMomentum(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Momentum, learning_rate=1e-1)
+
+
 class TestAdam(TestBase):
-    def set_op_attrs(self):
-        self.attrs = {}
-        self.attrs['steps'] = 100
-        self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'adam'
-        self.attrs['enable_fp16'] = False
-        self.attrs['model_path'] = tempfile.TemporaryDirectory()
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adam, learning_rate=1e-1)
 
 
 class TestLamb(TestBase):
-    def set_op_attrs(self):
-        self.attrs = {}
-        self.attrs['steps'] = 100
-        self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'lamb'
-        self.attrs['enable_fp16'] = False
-        self.attrs['model_path'] = tempfile.TemporaryDirectory()
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Lamb, learning_rate=1e-1)
+
+
+class TestAdamW(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.AdamW, learning_rate=1e-1)
+
+
+class TestAdamax(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adamax, learning_rate=1e-1)
+
+
+class TestAdagrad(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adagrad, learning_rate=1e-1)
+
+
+class TestAdadelta(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adadelta, learning_rate=1e-1)
+
+
+class TestRMSProp(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.RMSProp, learning_rate=1e-1)
+
+
+class TestCenteredRMSProp(TestBase):
+    def set_optimizer(self):
+        self.optimizer = partial(
+            paddle.optimizer.RMSProp, learning_rate=1e-1, centered=True)
 
 
 @unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
 class TestSGDFP16(TestBase):
-    def set_op_attrs(self):
+    def set_attrs(self):
         self.attrs = {}
         self.attrs['steps'] = 100
         self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'sgd'
         self.attrs['enable_fp16'] = True
         self.attrs['model_path'] = tempfile.TemporaryDirectory()
 
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.SGD, learning_rate=1e-1)
 
-@unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
-class TestAdamFP16(TestBase):
-    def set_op_attrs(self):
-        self.attrs = {}
-        self.attrs['steps'] = 100
-        self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'adam'
-        self.attrs['enable_fp16'] = True
-        self.attrs['model_path'] = tempfile.TemporaryDirectory()
 
+class TestMomentumFp16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Momentum, learning_rate=1e-1)
 
-@unittest.skipIf(IPUOpTest.use_ipumodel(), "skip for ipumodel")
-class TestLambFP16(TestBase):
-    def set_op_attrs(self):
-        self.attrs = {}
-        self.attrs['steps'] = 100
-        self.attrs['save_at_step'] = 20
-        self.attrs['is_training'] = True
-        self.attrs['opt_type'] = 'lamb'
-        self.attrs['enable_fp16'] = True
-        self.attrs['model_path'] = tempfile.TemporaryDirectory()
+
+class TestAdamFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adam, learning_rate=1e-1)
+
+
+class TestLambFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Lamb, learning_rate=1e-1)
+
+
+class TestAdamWFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.AdamW, learning_rate=1e-1)
+
+
+class TestAdamaxFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adamax, learning_rate=1e-1)
+
+
+class TestAdagradFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adagrad, learning_rate=1e-1)
+
+
+class TestAdadeltaFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.Adadelta, learning_rate=1e-1)
+
+
+class TestRMSPropFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(paddle.optimizer.RMSProp, learning_rate=1e-1)
+
+
+class TestCenteredRMSPropFP16(TestSGDFP16):
+    def set_optimizer(self):
+        self.optimizer = partial(
+            paddle.optimizer.RMSProp, learning_rate=1e-1, centered=True)
 
 
 if __name__ == "__main__":
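
A note on the pattern above: instead of branching on an `opt_type` string, each test case now stores a `functools.partial` that binds the optimizer class and its hyperparameters, and `_test_base` instantiates it lazily via `self.optimizer().minimize(loss)`. Below is a minimal sketch of the same deferred-construction idea, using an eager-mode model purely for illustration (`make_optimizer` and `linear` are hypothetical names; the test itself builds a static graph):

from functools import partial

import paddle

# Bind the optimizer class and its hyperparameters now; construct the
# optimizer later, once the parameters it should update are known.
make_optimizer = partial(paddle.optimizer.SGD, learning_rate=1e-1)

linear = paddle.nn.Linear(4, 1)
opt = make_optimizer(parameters=linear.parameters())

# One eager-mode training step to show the factory-produced optimizer in use.
loss = paddle.mean(linear(paddle.rand([2, 4])))
loss.backward()
opt.step()
opt.clear_grad()

Swapping optimizers then only means changing the `partial(...)` binding, which is exactly what each `set_optimizer` override in the diff does.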