Review edits
Tabrizian committed Apr 13, 2022
1 parent 50ae996 commit 145bf4a
Showing 2 changed files with 90 additions and 46 deletions.
@@ -30,69 +30,103 @@


class ArgumentValidationTest(unittest.TestCase):

def test_infer_request_args(self):
# Dummy arguments used in the tests.
inputs = [pb_utils.Tensor('INPUT0', np.asarray([1, 2], dtype=np.int32))]
model_name = 'my_model'
requested_output_names = ['my_output']

#
# inputs field validation
with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(inputs=[None], model_name=model_name,
requested_output_names=requested_output_names)

with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(inputs=None, model_name=model_name,
requested_output_names=requested_output_names)
#

# Test list of None as inputs
with self.assertRaises(pb_utils.TritonModelException) as e:
pb_utils.InferenceRequest(
inputs=[None],
model_name=model_name,
requested_output_names=requested_output_names)

# Test None object as list of inputs
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
inputs=None,
model_name=model_name,
requested_output_names=requested_output_names)

# model_name validation
with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(model_name=None, inputs=inputs,
requested_output_names=requested_output_names)
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
model_name=None,
inputs=inputs,
requested_output_names=requested_output_names)

#
# Requested output name validations
with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=[None],
inputs=inputs, model_name=model_name)
#

with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=None,
inputs=inputs, model_name=model_name)
# Test list of None objects as requested_output_names
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(requested_output_names=[None],
inputs=inputs,
model_name=model_name)

with self.assertRaises(BaseException) as e:
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(requested_output_names=None,
inputs=inputs, model_name=model_name)
inputs=inputs,
model_name=model_name)

# Other arguments validation
with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=requested_output_names,
inputs=inputs, model_name=model_name, correleation_id=None)

with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=requested_output_names,
inputs=inputs, model_name=model_name, request_id=None)

with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=requested_output_names,
inputs=inputs, model_name=model_name, model_version=None)

with self.assertRaises(BaseException) as e:
pb_utils.InferenceRequest(requested_output_names=requested_output_names,
inputs=inputs, model_name=model_name, flags=None)

# This should not raise an exception
pb_utils.InferenceRequest(requested_output_names=[], inputs=[],
model_name=model_name)

# correlation_id set to None
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
requested_output_names=requested_output_names,
inputs=inputs,
model_name=model_name,
correleation_id=None)

# request_id set to None
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
requested_output_names=requested_output_names,
inputs=inputs,
model_name=model_name,
request_id=None)

# model_version set to None
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
requested_output_names=requested_output_names,
inputs=inputs,
model_name=model_name,
model_version=None)

# flags set to None
with self.assertRaises(TypeError) as e:
pb_utils.InferenceRequest(
requested_output_names=requested_output_names,
inputs=inputs,
model_name=model_name,
flags=None)

# Empty lists should not raise an exception
pb_utils.InferenceRequest(requested_output_names=[],
inputs=[],
model_name=model_name)

def test_infer_response_args(self):
outputs = [pb_utils.Tensor('OUTPUT0', np.asarray([1, 2], dtype=np.int32))]
outputs = [
pb_utils.Tensor('OUTPUT0', np.asarray([1, 2], dtype=np.int32))
]

# Inference Response
with self.assertRaises(BaseException) as e:
# Test list of None object as output tensor
with self.assertRaises(pb_utils.TritonModelException) as e:
pb_utils.InferenceResponse(output_tensors=[None])

with self.assertRaises(BaseException) as e:
# Test None as output tensors
with self.assertRaises(TypeError) as e:
pb_utils.InferenceResponse(output_tensors=None)

# This should not raise an exception
@@ -102,15 +136,26 @@ def test_infer_response_args(self):
def test_tensor_args(self):
np_array = np.asarray([1, 2], dtype=np.int32)

with self.assertRaises(BaseException) as e:
# Test None as tensor name
with self.assertRaises(TypeError) as e:
pb_utils.Tensor(None, np_array)

with self.assertRaises(BaseException) as e:
# Test None as Numpy array
with self.assertRaises(TypeError) as e:
pb_utils.Tensor("OUTPUT0", None)

with self.assertRaises(BaseException) as e:
# Test None as dlpack capsule
with self.assertRaises(TypeError) as e:
pb_utils.Tensor.from_dlpack("OUTPUT0", None)

# Test empty string as model name (from_dlpack)
with self.assertRaises(TypeError) as e:
pb_utils.Tensor.from_dlpack("", None)

# Test empty string as model name
with self.assertRaises(TypeError) as e:
pb_utils.Tensor("", None)


class TritonPythonModel:
"""This model tests the Python API arguments to make sure invalid args are
Expand All @@ -129,4 +174,3 @@ def execute(self, requests):
dtype=np.float16))
]))
return responses
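
For context, a minimal sketch of the well-formed calls that the validations above are guarding. It uses only the pb_utils constructors that appear in this diff; note that triton_python_backend_utils is provided by the Triton Python backend at runtime, so the snippet is meant to run inside a Python backend model rather than as a standalone script.

import numpy as np
import triton_python_backend_utils as pb_utils

# A valid input tensor: a non-empty name plus a NumPy array.
inputs = [pb_utils.Tensor('INPUT0', np.asarray([1, 2], dtype=np.int32))]

# A well-formed request: model_name is a string, inputs is a list of
# pb_utils.Tensor objects, and requested_output_names is a list of strings.
request = pb_utils.InferenceRequest(model_name='my_model',
                                    inputs=inputs,
                                    requested_output_names=['my_output'])

# A well-formed response wraps a list of output tensors.
response = pb_utils.InferenceResponse(output_tensors=[
    pb_utils.Tensor('OUTPUT0', np.asarray([1, 2], dtype=np.float16))
])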

2 changes: 1 addition & 1 deletion qa/L0_backend_python/argument_validation/test.sh
@@ -31,7 +31,7 @@ TEST_RESULT_FILE='test_results.txt'
TRITON_DIR=${TRITON_DIR:="/opt/tritonserver"}
SERVER=${TRITON_DIR}/bin/tritonserver
BACKEND_DIR=${TRITON_DIR}/backends
SERVER_ARGS="--model-repository=`pwd`/models --model-control-mode=explicit --backend-directory=${BACKEND_DIR} --log-verbose=1"
SERVER_ARGS="--model-repository=`pwd`/models --backend-directory=${BACKEND_DIR} --log-verbose=1"
SERVER_LOG="./inference_server.log"

RET=0
