Skip to content

Commit

Permalink
DLPack: add test using PyTorch DLPack functions. (#8294)
Browse files Browse the repository at this point in the history
Co-authored-by: iefgnoix <isaacwxf23@gmail.com>
  • Loading branch information
ysiraichi and vanbasten23 authored Oct 30, 2024
1 parent 8177447 commit dc20b2d
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 0 deletions.
1 change: 1 addition & 0 deletions .torch_pin
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
#138470
11 changes: 11 additions & 0 deletions test/test_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -2912,6 +2912,17 @@ def test_dlpack_xla_to_pytorch_cuda(self):
cuda_t1[0] = cuda_t1[0] + 20
self.assertTrue(torch.allclose(xla_t1.cpu(), cuda_t1.cpu()))

@onlyIfTorchSupportsCUDA
@onlyIfPJRTDeviceIsCUDA
def test_dlpack_xla_to_pytorch_cuda_protocol_conversion(self):
    """Round-trip an XLA tensor through the legacy DLPack capsule API.

    Exports an XLA tensor via ``to_dlpack`` and re-imports the capsule with
    ``from_dlpack``, then checks that the resulting CUDA tensor lives on the
    matching device and aliases the same memory (a write through one tensor
    is visible through the other).
    """
    source = torch.arange(5).to(xm.xla_device())
    capsule = torch.utils.dlpack.to_dlpack(source)
    mirrored = torch.utils.dlpack.from_dlpack(capsule)
    # The re-imported tensor must land on the CUDA device backing the
    # XLA tensor, with the same device index.
    self.assertEqual(mirrored.device.type, 'cuda')
    self.assertEqual(mirrored.device.index, source.device.index)
    # Mutate through the CUDA alias; the XLA tensor must observe the write,
    # proving both tensors share the same underlying storage.
    mirrored[0] = mirrored[0] + 20
    self.assertTrue(torch.allclose(source.cpu(), mirrored.cpu()))

@onlyIfTorchSupportsCUDA
@onlyIfPJRTDeviceIsCUDA
def test_dlpack_non_default_layout(self):
Expand Down

0 comments on commit dc20b2d

Please sign in to comment.