2 files changed: +23 −1

@@ -32,7 +32,7 @@ def base_test():
     model1 = lambda x: F.softmax(F.elu(fc3(x)))
     model2 = lambda x: F.softmax(F.tanh(fc3(x)))
     model3 = lambda x: F.softmax(F.sigmoid(fc3(x)))
-    model4 = lambda x: softmax(F.leaky_relu(fc4(x)))
+    model4 = lambda x: softmax(F.leaky_relu(fc4(x))).clone()
     model5 = lambda x: softmax(F.logsigmoid(fc4(x.transpose(0,1))))
     model6 = lambda x: fc3(F.max_pool2d(x.unsqueeze(dim=0),2).squeeze())
     model7 = lambda x: fc3(F.max_pool2d(x.unsqueeze(dim=0),2).squeeze(dim=0))
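The only test change is the trailing `.clone()` on `model4`, presumably so that the new Clone emitter added below gets exercised. A minimal sketch of that model on a recent PyTorch, assuming `fc4` is an `nn.Linear` and `softmax` is a softmax wrapper (both are defined outside this hunk, so the shapes here are made up):

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

fc4 = nn.Linear(10, 4)                  # assumed shape; the real fc4 is defined elsewhere
softmax = lambda t: F.softmax(t, dim=-1)

# same structure as the patched model4: the final .clone() adds a Clone node
model4 = lambda x: softmax(F.leaky_relu(fc4(x))).clone()

out = model4(torch.randn(2, 10, requires_grad=True))
print(out.grad_fn)                      # a CloneBackward node on current PyTorch
```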

@@ -663,6 +663,28 @@ def free_tpl(self):
 register(Abs, torch.autograd._functions.pointwise.Abs)
 
 
+class Clone(Emitter):
+
+    def __init__(self, obj, prevfns):
+        Emitter.__init__(self, obj, prevfns)
+        self.def_vars({
+            'input': id(prevfns[0]),
+        })
+        self.infer_type_var = 'input'
+
+    def call_tpl(self):
+        return '''
+            TH${T}Tensor *$id = TH${T}Tensor_newClone($input);
+        '''
+
+    def free_tpl(self):
+        return '''
+            TH${T}Tensor_free($id);
+        '''
+
+register(Clone, torch.autograd._functions.tensor.Clone)
+
+
 class Sigmoid(Emitter):
 
     def __init__(self, obj, prevfns):
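In `call_tpl`/`free_tpl` the `$`-placeholders (`${T}`, `$id`, `$input`) are filled in by the emitter framework when the C source is generated; the exact mechanism lives outside this diff. A minimal sketch of the expansion, assuming `string.Template`-style substitution and made-up names for the tensor type and node ids:

```python
from string import Template

call_tpl = 'TH${T}Tensor *$id = TH${T}Tensor_newClone($input);'
free_tpl = 'TH${T}Tensor_free($id);'

# 'Float', 'x_1' and 'x_0' are assumptions standing in for the inferred
# tensor type and the ids assigned to this node and to its input
subs = {'T': 'Float', 'id': 'x_1', 'input': 'x_0'}
print(Template(call_tpl).substitute(subs))  # THFloatTensor *x_1 = THFloatTensor_newClone(x_0);
print(Template(free_tpl).substitute(subs))  # THFloatTensor_free(x_1);
```

Since `TH*Tensor_newClone` allocates a fresh tensor holding a copy of its input, `free_tpl` releases it again with `TH*Tensor_free` once the value is no longer needed, mirroring the other emitters in this file.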