@@ -30,40 +30,40 @@ object TrainMnist {
   // multi-layer perceptron
   def getMlp: Symbol = {
     val data = Symbol.Variable("data")
-    val fc1 = Symbol.FullyConnected(name = "fc1")()(Map("data" -> data, "num_hidden" -> 128))
-    val act1 = Symbol.Activation(name = "relu1")()(Map("data" -> fc1, "act_type" -> "relu"))
-    val fc2 = Symbol.FullyConnected(name = "fc2")()(Map("data" -> act1, "num_hidden" -> 64))
-    val act2 = Symbol.Activation(name = "relu2")()(Map("data" -> fc2, "act_type" -> "relu"))
-    val fc3 = Symbol.FullyConnected(name = "fc3")()(Map("data" -> act2, "num_hidden" -> 10))
-    val mlp = Symbol.SoftmaxOutput(name = "softmax")()(Map("data" -> fc3))
+
+    val fc1 = Symbol.api.FullyConnected(data = Some(data), num_hidden = 128, name = "fc1")
+    val act1 = Symbol.api.Activation(data = Some(fc1), "relu", name = "relu")
+    val fc2 = Symbol.api.FullyConnected(Some(act1), None, None, 64, name = "fc2")
+    val act2 = Symbol.api.Activation(data = Some(fc2), "relu", name = "relu2")
+    val fc3 = Symbol.api.FullyConnected(Some(act2), None, None, 10, name = "fc3")
+    val mlp = Symbol.api.SoftmaxOutput(name = "softmax", data = Some(fc3))
     mlp
   }

   // LeCun, Yann, Leon Bottou, Yoshua Bengio, and Patrick
   // Haffner. "Gradient-based learning applied to document recognition."
   // Proceedings of the IEEE (1998)
+
   def getLenet: Symbol = {
     val data = Symbol.Variable("data")
     // first conv
-    val conv1 = Symbol.Convolution()()(
-      Map("data" -> data, "kernel" -> "(5, 5)", "num_filter" -> 20))
-    val tanh1 = Symbol.Activation()()(Map("data" -> conv1, "act_type" -> "tanh"))
-    val pool1 = Symbol.Pooling()()(Map("data" -> tanh1, "pool_type" -> "max",
-      "kernel" -> "(2, 2)", "stride" -> "(2, 2)"))
+    val conv1 = Symbol.api.Convolution(data = Some(data), kernel = Shape(5, 5), num_filter = 20)
+    val tanh1 = Symbol.api.tanh(data = Some(conv1))
+    val pool1 = Symbol.api.Pooling(data = Some(tanh1), pool_type = Some("max"),
+      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
     // second conv
-    val conv2 = Symbol.Convolution()()(
-      Map("data" -> pool1, "kernel" -> "(5, 5)", "num_filter" -> 50))
-    val tanh2 = Symbol.Activation()()(Map("data" -> conv2, "act_type" -> "tanh"))
-    val pool2 = Symbol.Pooling()()(Map("data" -> tanh2, "pool_type" -> "max",
-      "kernel" -> "(2, 2)", "stride" -> "(2, 2)"))
+    val conv2 = Symbol.api.Convolution(data = Some(pool1), kernel = Shape(5, 5), num_filter = 50)
+    val tanh2 = Symbol.api.tanh(data = Some(conv2))
+    val pool2 = Symbol.api.Pooling(data = Some(tanh2), pool_type = Some("max"),
+      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))
     // first fullc
-    val flatten = Symbol.Flatten()()(Map("data" -> pool2))
-    val fc1 = Symbol.FullyConnected()()(Map("data" -> flatten, "num_hidden" -> 500))
-    val tanh3 = Symbol.Activation()()(Map("data" -> fc1, "act_type" -> "tanh"))
+    val flatten = Symbol.api.Flatten(data = Some(pool2))
+    val fc1 = Symbol.api.FullyConnected(data = Some(flatten), num_hidden = 500)
+    val tanh3 = Symbol.api.tanh(data = Some(fc1))
     // second fullc
-    val fc2 = Symbol.FullyConnected()()(Map("data" -> tanh3, "num_hidden" -> 10))
+    val fc2 = Symbol.api.FullyConnected(data = Some(tanh3), num_hidden = 10)
     // loss
-    val lenet = Symbol.SoftmaxOutput(name = "softmax")()(Map("data" -> fc2))
+    val lenet = Symbol.api.SoftmaxOutput(name = "softmax", data = Some(fc2))
     lenet
   }
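The change in both functions is the same migration: the old builder style routes every operator argument through an untyped Map, with kernel and stride shapes encoded as strings such as "(2, 2)", while the generated Symbol.api methods take named, typed parameters, wrapping optional symbol inputs in Option and shapes in Shape. A minimal side-by-side sketch, assuming the org.apache.mxnet package name of this Scala binding; the object name is purely illustrative:

```scala
import org.apache.mxnet.{Shape, Symbol}

object ApiStyleSketch {
  def main(args: Array[String]): Unit = {
    val data = Symbol.Variable("data")

    // Old builder style (the lines removed above): an untyped Map of arguments,
    // with shapes written as strings:
    //   val pool = Symbol.Pooling()()(Map("data" -> data, "pool_type" -> "max",
    //     "kernel" -> "(2, 2)", "stride" -> "(2, 2)"))

    // New typed style (the lines added above): named parameters, Option-wrapped
    // symbol inputs, and real Shape values instead of strings
    val pool = Symbol.api.Pooling(data = Some(data), pool_type = Some("max"),
      kernel = Some(Shape(2, 2)), stride = Some(Shape(2, 2)))

    // The resulting symbol behaves like any other: its graph inputs can be listed
    println(pool.listArguments())
  }
}
```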
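As a quick sanity check on either rewritten network, the composed symbol can be shape-inferred from an MNIST-sized input batch; both graphs should end in a (batch, 10) softmax output. A sketch, assuming Symbol.inferShape accepts a Map of input shapes and returns (argument, output, auxiliary) shape sequences as in the Python API; the object name and batch size are illustrative:

```scala
import org.apache.mxnet.Shape

object ShapeCheck {
  def main(args: Array[String]): Unit = {
    // The MLP consumes flattened 28x28 images, LeNet raw 1x28x28 images
    val (_, mlpOut, _) = TrainMnist.getMlp.inferShape(Map("data" -> Shape(32, 784)))
    val (_, lenetOut, _) = TrainMnist.getLenet.inferShape(Map("data" -> Shape(32, 1, 28, 28)))

    println(s"MLP output shape:   ${mlpOut.head}")   // expected (32, 10)
    println(s"LeNet output shape: ${lenetOut.head}") // expected (32, 10)
  }
}
```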