diff --git a/qa/L0_backend_python/python_test.py b/qa/L0_backend_python/python_test.py
index 3c5d520775..2c7d4f8722 100644
--- a/qa/L0_backend_python/python_test.py
+++ b/qa/L0_backend_python/python_test.py
@@ -297,7 +297,9 @@ def test_unicode(self):
         model_name = "string"
         shape = [1]
 
-        for i in range(3):
+        # The first run will use np.bytes_ and the second run will use
+        # np.object_
+        for i in range(2):
             with self._shm_leak_detector.Probe() as shm_probe:
                 with httpclient.InferenceServerClient(
                         "localhost:8000") as client:
@@ -328,7 +330,10 @@ def test_string(self):
         model_name = "string_fixed"
         shape = [1]
 
-        for i in range(6):
+        # Test different string outputs. This test will send 4 requests to the
+        # backend. The model will return 4 responses (np.object_ and np.bytes_)
+        # * (empty output and fixed output)
+        for i in range(4):
             with self._shm_leak_detector.Probe() as shm_probe:
                 with httpclient.InferenceServerClient(
                         "localhost:8000") as client:
diff --git a/qa/L0_string_io/string_client_test.py b/qa/L0_string_io/string_client_test.py
index c50828d262..4b2767d734 100644
--- a/qa/L0_string_io/string_client_test.py
+++ b/qa/L0_string_io/string_client_test.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright (c) 2019-2020, NVIDIA CORPORATION. All rights reserved.
+# Copyright 2019-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -148,7 +148,7 @@ def _test_str_dtype(self, client, model_name, dtype=np.object_):
         self._test_infer_non_unicode(model_name, client, in0_bytes)
 
     def _test_bytes(self, model_name):
-        dtypes = [np.object_, np.object, np.bytes_]
+        dtypes = [np.object_, np.bytes_]
 
         # This clients will fail for binary_data=False when the binary input
         # is not UTF-8 encodable. They should work for other cases however.
diff --git a/qa/python_models/string/model.py b/qa/python_models/string/model.py
index 1fd5aece6e..7c4eb93623 100644
--- a/qa/python_models/string/model.py
+++ b/qa/python_models/string/model.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+# Copyright 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -35,7 +35,7 @@ class TritonPythonModel:
 
     def initialize(self, args):
         self._index = 0
-        self._dtypes = [np.bytes_, np.object_, np.object]
+        self._dtypes = [np.bytes_, np.object_]
 
     def execute(self, requests):
         responses = []
diff --git a/qa/python_models/string_fixed/model.py b/qa/python_models/string_fixed/model.py
index d1aed94be3..a925b2bb09 100644
--- a/qa/python_models/string_fixed/model.py
+++ b/qa/python_models/string_fixed/model.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
+# Copyright 2021-2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
 # modification, are permitted provided that the following conditions
@@ -35,21 +35,25 @@ class TritonPythonModel:
 
     def initialize(self, args):
         self._index = 0
-        self._dtypes = [np.bytes_, np.object_, np.object]
+        self._dtypes = [np.bytes_, np.object_]
 
     def execute(self, requests):
+        # Create four different responses (empty string or fixed string) * (two
+        # datatypes)
         responses = []
         for _ in requests:
-            if self._index % 2 == 0:
+            if self._index == 0:
                 out_tensor_0 = pb_utils.Tensor(
-                    "OUTPUT0",
-                    np.array(['123456'], dtype=self._dtypes[self._index % 3]))
-            else:
-                # Test sending strings with no elements
+                    "OUTPUT0", np.array(['123456'], dtype=self._dtypes[0]))
+            elif self._index == 1:
                 out_tensor_0 = pb_utils.Tensor(
-                    "OUTPUT0", np.array([],
-                                        dtype=self._dtypes[self._index % 3]))
-
+                    "OUTPUT0", np.array([], dtype=self._dtypes[1]))
+            elif self._index == 2:
+                out_tensor_0 = pb_utils.Tensor(
+                    "OUTPUT0", np.array(['123456'], dtype=self._dtypes[1]))
+            elif self._index == 3:
+                out_tensor_0 = pb_utils.Tensor(
+                    "OUTPUT0", np.array([], dtype=self._dtypes[0]))
             self._index += 1
             responses.append(pb_utils.InferenceResponse([out_tensor_0]))
         return responses