diff --git a/README.md b/README.md
index b60eda602..461d5b8c7 100644
--- a/README.md
+++ b/README.md
@@ -17,7 +17,6 @@ You can install extra extensions to enable the following models:
 
 - PaddlePaddle model
 - TFLite model
-- Neo DLR (TVM) model
 - XGBoost model
 - LightGBM model
 - Sentencepiece model
diff --git a/benchmark/README.md b/benchmark/README.md
index 0f61ca877..9b326302f 100644
--- a/benchmark/README.md
+++ b/benchmark/README.md
@@ -191,7 +191,6 @@ By default, the above script will use MXNet as the default Engine, but you can a
 -e OnnxRuntime # pytorch
 -e TFLite      # TFLite
 -e TensorRT    # TensorRT
--e DLR         # Neo DLR
 -e XGBoost     # XGBoost
 -e LightGBM    # LightGBM
 -e Python      # Python script
diff --git a/benchmark/build.gradle b/benchmark/build.gradle
index a8749bfb3..450d92e31 100644
--- a/benchmark/build.gradle
+++ b/benchmark/build.gradle
@@ -15,7 +15,6 @@ dependencies {
     runtimeOnly "ai.djl.mxnet:mxnet-model-zoo"
     runtimeOnly "ai.djl.paddlepaddle:paddlepaddle-model-zoo"
     runtimeOnly "ai.djl.tflite:tflite-engine"
-    runtimeOnly "ai.djl.dlr:dlr-engine"
     runtimeOnly "ai.djl.ml.xgboost:xgboost"
     runtimeOnly project(":engines:python")
     runtimeOnly "ai.djl.tensorrt:tensorrt"
diff --git a/benchmark/snapcraft/snapcraft.yaml b/benchmark/snapcraft/snapcraft.yaml
index 6f1300f2f..d630da1ad 100644
--- a/benchmark/snapcraft/snapcraft.yaml
+++ b/benchmark/snapcraft/snapcraft.yaml
@@ -16,7 +16,6 @@ description: |
   - ONNXRuntime
   - TensorRT
   - TensorFlow Lite
-  - Neo DLR
   - XGBoost
   - Python
 
diff --git a/benchmark/src/main/java/ai/djl/benchmark/Benchmark.java b/benchmark/src/main/java/ai/djl/benchmark/Benchmark.java
index c726b7462..73a04e4d6 100644
--- a/benchmark/src/main/java/ai/djl/benchmark/Benchmark.java
+++ b/benchmark/src/main/java/ai/djl/benchmark/Benchmark.java
@@ -124,9 +124,6 @@ private static void configEngines(boolean multithreading) {
         if (System.getProperty("ai.djl.tflite.disable_alternative") == null) {
             System.setProperty("ai.djl.tflite.disable_alternative", "true");
         }
-        if (System.getProperty("ai.djl.dlr.disable_alternative") == null) {
-            System.setProperty("ai.djl.dlr.disable_alternative", "true");
-        }
         if (System.getProperty("ai.djl.paddlepaddle.disable_alternative") == null) {
             System.setProperty("ai.djl.paddlepaddle.disable_alternative", "true");
         }
diff --git a/plugins/management-console/webapp/src/view/Dependency.vue b/plugins/management-console/webapp/src/view/Dependency.vue
index d365fc452..dda5271ef 100644
--- a/plugins/management-console/webapp/src/view/Dependency.vue
+++ b/plugins/management-console/webapp/src/view/Dependency.vue
@@ -55,7 +55,6 @@
-
diff --git a/serving/docs/configurations.md b/serving/docs/configurations.md
index 02c00bd57..104daad15 100644
--- a/serving/docs/configurations.md
+++ b/serving/docs/configurations.md
@@ -63,18 +63,11 @@ DJLServing build on top of Deep Java Library (DJL). Here is a list of settings f
 | PADDLE_LIBRARY_PATH                     | env var/system prop | User provided custom PaddlePaddle native library |
 | ai.djl.paddlepaddle.disable_alternative | system prop         | Disable alternative engine                       |
 
-### Neo DLR (TVM)
-
-| Key                            | Type        | Description                             |
-|--------------------------------|-------------|-----------------------------------------|
-| DLR_LIBRARY_PATH               | env var     | User provided custom DLR native library |
-| ai.djl.dlr.disable_alternative | system prop | Disable alternative engine              |
-
 ### Huggingface tokenizers
 
-| Key              | Type    | Description                             |
-|------------------|---------|-----------------------------------------|
-| TOKENIZERS_CACHE | env var | User provided custom DLR native library |
+| Key              | Type    | Description                                                |
+|------------------|---------|------------------------------------------------------------|
+| TOKENIZERS_CACHE | env var | User provided custom Huggingface tokenizer native library  |
 
 ### Python
 
@@ -193,7 +186,6 @@ The follow table show some engine specific environment variables that is overrid
 | TF_NUM_INTEROP_THREADS | TensorFlow | default 1, OMP_NUM_THREADS will override this value |
 | TF_NUM_INTRAOP_THREADS | TensorFlow | default 1                                           |
 | TF_CPP_MIN_LOG_LEVEL   | TensorFlow | default 1                                           |
-| TVM_NUM_THREADS        | DLR/TVM    | default 1, OMP_NUM_THREADS will override this value |
 | MXNET_ENGINE_TYPE      | MXNet      | this value must be `NaiveEngine`                    |
 
 ## Appendix
diff --git a/serving/src/main/java/ai/djl/serving/ModelServer.java b/serving/src/main/java/ai/djl/serving/ModelServer.java
index f7503defa..35fe6e28d 100644
--- a/serving/src/main/java/ai/djl/serving/ModelServer.java
+++ b/serving/src/main/java/ai/djl/serving/ModelServer.java
@@ -582,9 +582,6 @@ private String inferEngine(Path modelDir, String modelName) {
             return "PaddlePaddle";
         } else if (Files.isRegularFile(modelDir.resolve(modelName + ".json"))) {
             return "XGBoost";
-        } else if (Files.isRegularFile(modelDir.resolve(modelName + ".dylib"))
-                || Files.isRegularFile(modelDir.resolve(modelName + ".so"))) {
-            return "DLR";
         }
         logger.warn("Failed to detect engine of the model: {}", modelDir);
         return null;
diff --git a/serving/src/main/java/ai/djl/serving/plugins/DependencyManager.java b/serving/src/main/java/ai/djl/serving/plugins/DependencyManager.java
index d251d5a30..cb48dbff8 100644
--- a/serving/src/main/java/ai/djl/serving/plugins/DependencyManager.java
+++ b/serving/src/main/java/ai/djl/serving/plugins/DependencyManager.java
@@ -114,9 +114,6 @@ public void installEngine(String engineName) throws IOException {
             case "XGBoost":
                 installDependency("ai.djl.ml.xgboost:xgboost:" + djlVersion);
                 break;
-            case "DLR":
-                installDependency("ai.djl.dlr:dlr-engine:" + djlVersion);
-                break;
             default:
                 break;
         }
diff --git a/serving/src/main/puml/architecture.puml b/serving/src/main/puml/architecture.puml
index 60d3a5c40..2829cc538 100644
--- a/serving/src/main/puml/architecture.puml
+++ b/serving/src/main/puml/architecture.puml
@@ -50,7 +50,6 @@ package "DJL Serving - single process" {
     MXNet
     OnnxRuntime
     TFLite
-    DLR
     XGBoost
   ]
 }
diff --git a/serving/src/test/java/ai/djl/serving/ModelServerTest.java b/serving/src/test/java/ai/djl/serving/ModelServerTest.java
index 9a73b2ca8..df3979f43 100644
--- a/serving/src/test/java/ai/djl/serving/ModelServerTest.java
+++ b/serving/src/test/java/ai/djl/serving/ModelServerTest.java
@@ -188,11 +188,6 @@ public void testModelStore()
 
         String expected = modelDir.toUri().toURL().toString();
 
-        Path dlr = modelDir.resolve("test_model.so");
-        Files.createFile(dlr);
-        url = server.mapModelUrl(modelDir);
-        assertEquals(url, "test_model::DLR:*=" + expected);
-
         Path xgb = modelDir.resolve("test_model.json");
         Files.createFile(xgb);
         url = server.mapModelUrl(modelDir);
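
Reviewer note: removing the `.dylib`/`.so` branch from `ModelServer.inferEngine` means a model directory containing only a bare native library is no longer auto-detected as a DLR model; detection now falls through to the "Failed to detect engine of the model" warning and returns `null`, which is why the `test_model.so` assertion is dropped from `ModelServerTest.testModelStore`. The sketch below is a minimal, hypothetical standalone helper (the class `EngineDetectionSketch`, its `main` method, and the sample path are illustrative assumptions, not code from this PR) showing the extension-based detection for the branches this diff touches.

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

/** Illustrative sketch only; not the actual ai.djl.serving.ModelServer implementation. */
final class EngineDetectionSketch {

    private EngineDetectionSketch() {}

    /** Infers an engine name from the model artifact's file extension, or returns null. */
    static String inferEngine(Path modelDir, String modelName) {
        if (Files.isRegularFile(modelDir.resolve(modelName + ".json"))) {
            // *.json artifacts are still detected as XGBoost models.
            return "XGBoost";
        }
        // Before this change, *.so / *.dylib artifacts were mapped to "DLR" at this point.
        // With that branch removed, no engine is inferred and the caller logs a warning.
        return null;
    }

    public static void main(String[] args) {
        Path modelDir = Paths.get("build/models/test_model"); // hypothetical model directory
        System.out.println(inferEngine(modelDir, "test_model")); // null unless test_model.json exists
    }
}
```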