Rollback ONNX Runtime to specific commit as rel-0.5.0 is broken (triton-inference-server#583)

* Rollback ONNX Runtime to specific commit as rel-0.5.0 is broken

* Update submodule to match the commit being checked out
GuanLuo authored Aug 24, 2019
1 parent fd5eca9 commit d9b4bda
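
The second commit message bullet matters because `git checkout <sha>` only moves the superproject; the submodule working trees stay where the previous branch left them until they are explicitly realigned. A minimal sketch of the pattern the Dockerfile change below applies (the repository URL and commit hash are the ones from this diff; nothing beyond the diff is implied):

    # Clone with submodules, then pin the superproject to the known-good commit
    git clone --recursive https://github.com/Microsoft/onnxruntime
    cd onnxruntime
    git checkout 2f698bd54b713bb87dbd0bbb913e94bcf7fd480c
    # Move each submodule to the revision recorded by that commit
    git submodule update
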
Showing 5 changed files with 13 additions and 14 deletions.
Dockerfile: 19 changes (9 additions & 10 deletions)
@@ -99,13 +99,18 @@ FROM ${BASE_IMAGE} AS trtserver_onnx
 # needs to be built from source
 
 # Onnx Runtime release version
-ARG ONNX_RUNTIME_VERSION=0.5.0
+ARG ONNX_RUNTIME_VERSION=0.4.0
 
 # Get release version of Onnx Runtime
 WORKDIR /workspace
 RUN apt-get update && apt-get install -y --no-install-recommends git
 
-RUN git clone -b rel-${ONNX_RUNTIME_VERSION} --recursive https://github.com/Microsoft/onnxruntime
+# Check out stable commit on master until new release
+# to support cloud-based filesystems
+RUN git clone --recursive https://github.com/Microsoft/onnxruntime && \
+    (cd onnxruntime && \
+     git checkout 2f698bd54b713bb87dbd0bbb913e94bcf7fd480c && \
+     git submodule update)
 
 ENV PATH="/opt/cmake/bin:${PATH}"
 ARG SCRIPT_DIR=/workspace/onnxruntime/tools/ci_build/github/linux/docker/scripts
@@ -231,14 +236,8 @@ COPY --from=trtserver_caffe2 /opt/conda/lib/python3.6/site-packages/torch/lib/li
     /opt/tensorrtserver/lib/
 
 # Onnx Runtime headers and library
-# Put include files to same directory as ONNX Runtime changed the include path
-# https://github.com/microsoft/onnxruntime/pull/1461
-ARG ONNX_RUNTIME_VERSION=0.5.0
-COPY --from=trtserver_onnx /workspace/onnxruntime/include/onnxruntime/core/session/onnxruntime_c_api.h \
-    /opt/tensorrtserver/include/onnxruntime/
-COPY --from=trtserver_onnx /workspace/onnxruntime/include/onnxruntime/core/providers/cpu/cpu_provider_factory.h \
-    /opt/tensorrtserver/include/onnxruntime/
-COPY --from=trtserver_onnx /workspace/onnxruntime/include/onnxruntime/core/providers/cuda/cuda_provider_factory.h \
+ARG ONNX_RUNTIME_VERSION=0.4.0
+COPY --from=trtserver_onnx /workspace/onnxruntime/include/onnxruntime \
     /opt/tensorrtserver/include/onnxruntime/
 COPY --from=trtserver_onnx /workspace/build/Release/libonnxruntime.so.${ONNX_RUNTIME_VERSION} \
     /opt/tensorrtserver/lib/
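
The source changes below follow from this COPY: since the whole onnxruntime include tree is now copied, headers are included by their full path inside that tree rather than by bare file name. A hypothetical compile line, assuming /opt/tensorrtserver/include/onnxruntime is passed as the include root (the flags and output name are illustrative, not taken from the project's build):

    # With this -I root, '#include <core/session/onnxruntime_c_api.h>' resolves to
    # /opt/tensorrtserver/include/onnxruntime/core/session/onnxruntime_c_api.h
    g++ -c src/backends/onnx/onnx_backend.cc \
        -I. -I/opt/tensorrtserver/include/onnxruntime \
        -o onnx_backend.o
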
src/backends/onnx/loader.h: 2 changes (1 addition & 1 deletion)
@@ -25,7 +25,7 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #pragma once
 
-#include <onnxruntime_c_api.h>
+#include <core/session/onnxruntime_c_api.h>
 #include "src/core/status.h"
 
 namespace nvidia { namespace inferenceserver {
src/backends/onnx/onnx_backend.cc: 2 changes (1 addition & 1 deletion)
@@ -38,7 +38,7 @@
 #include "src/core/server_status.h"
 
 #ifdef TRTIS_ENABLE_GPU
-#include <cuda_provider_factory.h>
+#include <core/providers/cuda/cuda_provider_factory.h>
 #include <cuda_runtime_api.h>
 #endif  // TRTIS_ENABLE_GPU
 
src/backends/onnx/onnx_backend.h: 2 changes (1 addition & 1 deletion)
@@ -25,7 +25,7 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #pragma once
 
-#include <onnxruntime_c_api.h>
+#include <core/session/onnxruntime_c_api.h>
 #include "src/core/backend.h"
 #include "src/core/backend_context.h"
 #include "src/core/model_config.pb.h"
src/backends/onnx/onnx_utils.h: 2 changes (1 addition & 1 deletion)
@@ -26,7 +26,7 @@
 
 #pragma once
 
-#include <onnxruntime_c_api.h>
+#include <core/session/onnxruntime_c_api.h>
 #include "src/core/model_config.h"
 #include "src/core/status.h"
 
