Feature: Upgrade torchserve to 0.4.1 (kubeflow#1738)
* Feature: Upgrade torchserve to 0.4.1

* Fix: Update torchserve version in readme
Jagadeesh J authored Jul 27, 2021
1 parent e394be4 commit 1381f86
Showing 5 changed files with 20 additions and 20 deletions.
config/configmap/inferenceservice.yaml: 4 changes (2 additions, 2 deletions)
@@ -72,8 +72,8 @@ data:
},
"v2" : {
"image": "pytorch/torchserve-kfs",
"defaultImageVersion": "0.4.0",
"defaultGpuImageVersion": "0.4.0-gpu",
"defaultImageVersion": "0.4.1",
"defaultGpuImageVersion": "0.4.1-gpu",
"supportedFrameworks": [
"pytorch"
],
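The defaultImageVersion values in this configmap are what the defaulter fills in when an InferenceService does not set runtimeVersion; the updated test expectations below assert the resolved image pytorch/torchserve-kfs:0.4.1. As a minimal sketch of opting into the new release explicitly, assuming the v1beta1 InferenceService API used elsewhere in this repo and a hypothetical model name and storage URI:

```yaml
apiVersion: serving.kubeflow.org/v1beta1
kind: InferenceService
metadata:
  name: torchserve-demo            # hypothetical name for illustration
spec:
  predictor:
    pytorch:
      # Illustrative storage URI, not part of this commit.
      storageUri: gs://my-bucket/torchserve/model-store
      # Mirrors the RuntimeVersion values the updated tests assert against.
      # Omitting this field falls back to defaultImageVersion (now 0.4.1).
      runtimeVersion: "0.4.1"
```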
config/overlays/test/configmap/inferenceservice.yaml: 4 changes (2 additions, 2 deletions)
@@ -72,8 +72,8 @@ data:
},
"v2" : {
"image": "pytorch/torchserve-kfs",
"defaultImageVersion": "0.4.0",
"defaultGpuImageVersion": "0.3.0-gpu",
"defaultImageVersion": "0.4.1",
"defaultGpuImageVersion": "0.4.1-gpu",
"supportedFrameworks": [
"pytorch"
],
docs/samples/README.md: 4 changes (2 additions, 2 deletions)
@@ -26,8 +26,8 @@ After models are deployed onto model servers with KFServing, you get all the fol
| ------------- | ------------- | ------------- | ------------- | ------------- | ------------- | ------------- |
| [Triton Inference Server](https://github.com/triton-inference-server/server) | [TensorFlow,TorchScript,ONNX,TensorRT](https://docs.nvidia.com/deeplearning/triton-inference-server/user-guide/docs/model_repository.html)| v2 | :heavy_check_mark: | :heavy_check_mark: | [Compatibility Matrix](https://docs.nvidia.com/deeplearning/frameworks/support-matrix/index.html)| [Triton Examples](./v1beta1/triton) |
| [TFServing](https://www.tensorflow.org/tfx/guide/serving) | [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) | v1 | :heavy_check_mark: | :heavy_check_mark: | [TFServing Versions](https://github.com/tensorflow/serving/releases) | [TensorFlow Examples](./v1alpha2/tensorflow) |
- | [TorchServe](https://pytorch.org/serve/server.html) | [Eager Model/TorchScript](https://pytorch.org/docs/master/generated/torch.save.html) | v1 | :heavy_check_mark: | :heavy_check_mark: | 0.3.0 | [TorchServe Examples](./v1beta1/torchserve) |
- | [TorchServe Native](https://pytorch.org/serve/server.html) | [Eager Model/TorchScript](https://pytorch.org/docs/master/generated/torch.save.html) | native | :heavy_check_mark: | :heavy_check_mark: | 0.3.0 | [TorchServe Examples](./v1beta1/custom/torchserve) |
+ | [TorchServe](https://pytorch.org/serve/server.html) | [Eager Model/TorchScript](https://pytorch.org/docs/master/generated/torch.save.html) | v1 | :heavy_check_mark: | :heavy_check_mark: | 0.4.1 | [TorchServe Examples](./v1beta1/torchserve) |
+ | [TorchServe Native](https://pytorch.org/serve/server.html) | [Eager Model/TorchScript](https://pytorch.org/docs/master/generated/torch.save.html) | native | :heavy_check_mark: | :heavy_check_mark: | 0.4.1 | [TorchServe Examples](./v1beta1/custom/torchserve) |
| [ONNXRuntime](https://github.com/microsoft/onnxruntime) | [Exported ONNX Model](https://github.com/onnx/tutorials#converting-to-onnx-format) | v1 | :heavy_check_mark: | :heavy_check_mark: | [Compatibility](https://github.com/microsoft/onnxruntime#compatibility) |[ONNX Style Model](./v1alpha2/onnx) |
| [SKLearn MLServer](https://github.com/SeldonIO/MLServer) | [Pickled Model](https://scikit-learn.org/stable/modules/model_persistence.html) | v2 | :heavy_check_mark: | :heavy_check_mark: | 0.23.1 | [SKLearn Iris V2](./v1beta1/sklearn/v2) |
| [XGBoost MLServer](https://github.com/SeldonIO/MLServer) | [Saved Model](https://xgboost.readthedocs.io/en/latest/tutorials/saving_model.html) | v2 | :heavy_check_mark: | :heavy_check_mark: | 1.1.1 | [XGBoost Iris V2](./v1beta1/xgboost) |
pkg/apis/serving/v1beta1/predictor_torchserve_test.go: 24 changes (12 additions, 12 deletions)
@@ -42,7 +42,7 @@ func TestTorchServeValidation(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
MultiModelServer: false,
},
},
@@ -56,7 +56,7 @@ func TestTorchServeValidation(t *testing.T) {
spec: PredictorSpec{
PyTorch: &TorchServeSpec{
PredictorExtensionSpec: PredictorExtensionSpec{
- RuntimeVersion: proto.String("0.4.0"),
+ RuntimeVersion: proto.String("0.4.1"),
},
},
},
@@ -66,7 +66,7 @@ func TestTorchServeValidation(t *testing.T) {
spec: PredictorSpec{
PyTorch: &TorchServeSpec{
PredictorExtensionSpec: PredictorExtensionSpec{
- RuntimeVersion: proto.String("0.4.0-gpu"),
+ RuntimeVersion: proto.String("0.4.1-gpu"),
},
},
},
@@ -76,7 +76,7 @@ func TestTorchServeValidation(t *testing.T) {
spec: PredictorSpec{
PyTorch: &TorchServeSpec{
PredictorExtensionSpec: PredictorExtensionSpec{
- RuntimeVersion: proto.String("0.4.0"),
+ RuntimeVersion: proto.String("0.4.1"),
Container: v1.Container{
Resources: v1.ResourceRequirements{
Limits: v1.ResourceList{constants.NvidiaGPUResourceType: resource.MustParse("1")},
@@ -160,7 +160,7 @@ func TestTorchServeDefaulter(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
MultiModelServer: false,
},
},
@@ -233,15 +233,15 @@ func TestTorchServeDefaulter(t *testing.T) {
ModelClassName: "PyTorchModel",
PredictorExtensionSpec: PredictorExtensionSpec{
ProtocolVersion: &protocolV1,
- RuntimeVersion: proto.String("0.4.0"),
+ RuntimeVersion: proto.String("0.4.1"),
},
},
},
expected: PredictorSpec{
PyTorch: &TorchServeSpec{
ModelClassName: "PyTorchModel",
PredictorExtensionSpec: PredictorExtensionSpec{
- RuntimeVersion: proto.String("0.4.0"),
+ RuntimeVersion: proto.String("0.4.1"),
ProtocolVersion: &protocolV1,
Container: v1.Container{
Name: constants.InferenceServiceContainerName,
@@ -467,7 +467,7 @@ func TestCreateTorchServeModelServingContainerV2(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
MultiModelServer: false,
},
},
@@ -488,7 +488,7 @@ func TestCreateTorchServeModelServingContainerV2(t *testing.T) {
PyTorch: &TorchServeSpec{
PredictorExtensionSpec: PredictorExtensionSpec{
StorageURI: proto.String("gs://someUri"),
- RuntimeVersion: proto.String("0.4.0"),
+ RuntimeVersion: proto.String("0.4.1"),
ProtocolVersion: &protocolV1,
Container: v1.Container{
Resources: requestedResource,
@@ -499,7 +499,7 @@ func TestCreateTorchServeModelServingContainerV2(t *testing.T) {
},
},
expectedContainerSpec: &v1.Container{
- Image: "pytorch/torchserve-kfs:0.4.0",
+ Image: "pytorch/torchserve-kfs:0.4.1",
Name: constants.InferenceServiceContainerName,
Resources: requestedResource,
Args: []string{
@@ -570,7 +570,7 @@ func TestTorchServeIsMMS(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
MultiModelServer: mmsCase,
},
},
@@ -647,7 +647,7 @@ func TestTorchServeIsFrameworkSupported(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
SupportedFrameworks: []string{pytorch},
},
},
pkg/controller/v1beta1/inferenceservice/utils/utils_test.go: 4 changes (2 additions, 2 deletions)
@@ -82,7 +82,7 @@ func TestIsMMSPredictor(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
MultiModelServer: mmsCase,
},
},
@@ -654,7 +654,7 @@ func TestIsMemoryResourceAvailable(t *testing.T) {
},
V2: &PredictorConfig{
ContainerImage: "pytorch/torchserve-kfs",
- DefaultImageVersion: "0.4.0",
+ DefaultImageVersion: "0.4.1",
},
},
Tensorflow: PredictorConfig{
