forked from triton-inference-server/server
-
Notifications
You must be signed in to change notification settings - Fork 0
/
WORKSPACE
118 lines (109 loc) · 4 KB
/
WORKSPACE
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Bazel workspace for the NVIDIA TensorRT Inference Server build.
workspace(name = "inference_server")
# TensorFlow sources are expected at /opt/tensorflow — the layout of the
# NVIDIA TensorFlow build container. NOTE(review): this is a hardcoded
# absolute path; the build only works inside that container image.
local_repository(
name = "org_tensorflow",
path = "/opt/tensorflow/",
)
# TensorFlow Serving is vendored as a subdirectory of this workspace.
local_repository(
name = "tf_serving",
path = __workspace_dir__ + "/serving/",
)
# Pre-built shared libraries (Caffe2, c10, MKL) installed in the server image.
# Each is exposed to the build as @extern_lib//:<name>. The original file
# repeated an identical cc_library stanza nine times; generate them from a
# single list instead so adding/removing a library is a one-line change.
_EXTERN_LIBS = [
    "libcaffe2",
    "libcaffe2_gpu",
    "libcaffe2_detectron_ops_gpu",
    "libc10",
    "libmkl_core",
    "libmkl_gnu_thread",
    "libmkl_avx2",
    "libmkl_def",
    "libmkl_intel_lp64",
]

new_local_repository(
    name = "extern_lib",
    # NOTE(review): hardcoded absolute path — only valid inside the
    # tensorrtserver build container.
    path = "/opt/tensorrtserver/lib",
    build_file_content = "\n".join([
        """
cc_library(
    name = "{lib}",
    srcs = ["{lib}.so"],
    visibility = ["//visibility:public"],
)
""".format(lib = lib)
        for lib in _EXTERN_LIBS
    ]),
)
# Need prometheus for metrics
# NOTE(review): no sha256 is pinned for this archive, so the download is not
# integrity-checked and the build is not hermetic. Compute and add the
# archive's sha256 (cf. the io_bazel_rules_closure entry below, which is
# pinned correctly).
http_archive(
name = "prometheus",
strip_prefix = "prometheus-cpp-0.5.0",
urls = ["https://github.com/jupp0r/prometheus-cpp/archive/v0.5.0.tar.gz"],
)
# Pull in civetweb, the embedded HTTP server prometheus-cpp uses to expose
# the metrics endpoint.
load("@prometheus//:repositories.bzl", "load_civetweb")
load_civetweb()
# TensorFlow depends on "io_bazel_rules_closure" so we need this here.
# Needs to be kept in sync with the same target in TensorFlow's WORKSPACE file.
# Pinned to an exact commit via strip_prefix/urls and integrity-checked with
# sha256; the bazel mirror URL is listed first as a download fallback.
http_archive(
name = "io_bazel_rules_closure",
sha256 = "a38539c5b5c358548e75b44141b4ab637bba7c4dc02b46b1f62a96d6433f56ae",
strip_prefix = "rules_closure-dbb96841cc0a5fb2664c37822803b06dab20c7d1",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/rules_closure/archive/dbb96841cc0a5fb2664c37822803b06dab20c7d1.tar.gz",
"https://github.com/bazelbuild/rules_closure/archive/dbb96841cc0a5fb2664c37822803b06dab20c7d1.tar.gz", # 2018-04-13
],
)
# Set up all of TensorFlow Serving's external dependencies.
# (Double quotes per Starlark/buildifier convention, consistent with the
# rest of this file — the original line used single quotes.)
load("@tf_serving//tensorflow_serving:workspace.bzl", "tf_serving_workspace")

tf_serving_workspace()

# Specify the minimum required bazel version.
load("@org_tensorflow//tensorflow:version_check.bzl", "check_bazel_version_at_least")

check_bazel_version_at_least("0.15.0")