Skip to content

Commit

Permalink
[PIR] Open uts for 214-220 (PaddlePaddle#60724)
Browse files Browse the repository at this point in the history
  • Loading branch information
0x45f authored Jan 16, 2024
1 parent d431622 commit 27c4227
Show file tree
Hide file tree
Showing 6 changed files with 16 additions and 8 deletions.
10 changes: 6 additions & 4 deletions test/legacy_test/test_activation_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -4050,6 +4050,7 @@ def test_base_api(self):
out_ref = ref_stanh(self.x_np, self.scale_a, self.scale_b)
np.testing.assert_allclose(out_ref, res[0], rtol=1e-05)

@test_with_pir_api
def test_errors(self):
with static_guard():
with paddle.static.program_guard(paddle.static.Program()):
Expand All @@ -4061,10 +4062,11 @@ def test_errors(self):
)
self.assertRaises(TypeError, paddle.stanh, x_int32)
# support the input dtype is float16
x_fp16 = paddle.static.data(
name='x_fp16', shape=[12, 10], dtype='float16'
)
paddle.stanh(x_fp16)
if core.is_compiled_with_cuda():
x_fp16 = paddle.static.data(
name='x_fp16', shape=[12, 10], dtype='float16'
)
paddle.stanh(x_fp16)


class TestSTanhAPIScaleA(TestSTanhAPI):
Expand Down
1 change: 1 addition & 0 deletions test/legacy_test/test_layers.py
Original file line number Diff line number Diff line change
Expand Up @@ -2208,6 +2208,7 @@ def test_affine_grid(self):
self.assertIsNotNone(data_0)
self.assertIsNotNone(data_1)

@test_with_pir_api
def test_stridedslice(self):
axes = [0, 1, 2]
starts = [1, 0, 2]
Expand Down
1 change: 1 addition & 0 deletions test/legacy_test/test_linalg_pinv_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,6 +309,7 @@ def pinv_zero_input_dynamic(self):
x = paddle.to_tensor(np.reshape(array, [0, 0]), dtype='float32')
paddle.linalg.pinv(x)

@test_with_pir_api
def test_div_by_zero(self):
with self.assertRaises(ValueError):
self.pinv_zero_input_dynamic()
Expand Down
5 changes: 4 additions & 1 deletion test/legacy_test/test_nn_margin_rank_loss.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
import paddle
from paddle import base
from paddle.base import core
from paddle.base.framework import in_pir_mode
from paddle.pir_utils import test_with_pir_api


Expand Down Expand Up @@ -82,6 +83,7 @@ def run_static_functional_api(self, place):
)
np.testing.assert_allclose(result_numpy, expected, rtol=1e-05)

@test_with_pir_api
def run_static_api(self, place):
paddle.enable_static()
expected = calc_margin_rank_loss(
Expand Down Expand Up @@ -117,7 +119,8 @@ def run_static_api(self, place):
fetch_list=[result],
)
np.testing.assert_allclose(result_numpy, expected, rtol=1e-05)
self.assertTrue('loss' in result.name)
if not in_pir_mode():
self.assertTrue('loss' in result.name)

def run_dynamic_functional_api(self, place):
paddle.disable_static(place)
Expand Down
1 change: 1 addition & 0 deletions test/legacy_test/test_square_error_cost.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ def test_square_error_cost(self):


class TestSquareErrorInvalidInput(unittest.TestCase):
@test_with_pir_api
def test_error(self):
def test_invalid_input():
input = [256, 3]
Expand Down
6 changes: 3 additions & 3 deletions test/legacy_test/test_svd_op.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,13 +72,13 @@ def test_svd_forward(self):
paddle.enable_static()

def check_S_grad(self):
self.check_grad(['X'], ['S'], numeric_grad_delta=0.001)
self.check_grad(['X'], ['S'], numeric_grad_delta=0.001, check_pir=True)

def check_U_grad(self):
self.check_grad(['X'], ['U'], numeric_grad_delta=0.001)
self.check_grad(['X'], ['U'], numeric_grad_delta=0.001, check_pir=True)

def check_V_grad(self):
self.check_grad(['X'], ['VH'], numeric_grad_delta=0.001)
self.check_grad(['X'], ['VH'], numeric_grad_delta=0.001, check_pir=True)

def test_check_grad(self):
"""
Expand Down

0 comments on commit 27c4227

Please sign in to comment.