File tree Expand file tree Collapse file tree 2 files changed +11
-3
lines changed Expand file tree Collapse file tree 2 files changed +11
-3
lines changed Original file line number Diff line number Diff line change @@ -2685,6 +2685,13 @@ TEST(ComputeFlopsTest, Basic) {
2685
2685
ASSERT_EQ (flops, 360 );
2686
2686
}
2687
2687
2688
+ TEST (TestConstant, TensorGrad) {
2689
+ auto graph = std::make_shared<Graph>();
2690
+ IValue ten = torch::randn ({3 , 5 }).requires_grad_ (true );
2691
+ auto con = tryInsertConstant (*graph, ten);
2692
+ ASSERT_TRUE (con == c10::nullopt);
2693
+ }
2694
+
2688
2695
TEST (TestMutation, Basic) {
2689
2696
auto graph = std::make_shared<Graph>();
2690
2697
std::unordered_map<std::string, Value*> vmap;
Original file line number Diff line number Diff line change @@ -10,7 +10,9 @@ namespace torch {
10
10
namespace jit {
11
11
12
12
// Returns true when `ten` can be embedded as a graph constant.
// Two kinds of tensors are rejected:
//  - storage-less (opaque) tensors, e.g. tensors used by MKL-DNN,
//    which we have no way to serialize;
//  - tensors that require grad, since their autograd state cannot be
//    serialized and they are mutable.
bool insertableTensor(const at::Tensor& ten) {
  return ten.has_storage() && !ten.requires_grad();
}
15
17
16
18
bool insertableIValue (const IValue& ivalue) {
@@ -65,8 +67,7 @@ c10::optional<Value*> tryInsertConstant(
65
67
Node* n = g.create (prim::Constant);
66
68
if (val.isTensor ()) {
67
69
at::Tensor ref = val.toTensor ();
68
- if (!ref.has_storage ()) {
69
- // bail if tensor has no storage i.e. opaque tensor used in MKLdnn.
70
+ if (!insertableTensor (val.toTensor ())) {
70
71
n->destroy ();
71
72
return c10::nullopt;
72
73
}
You can’t perform that action at this time.
0 commit comments