
Commit 012c9df

Author: Josh Fromm
[Unity] Merge with tvm/unity (apache#54)
It's that time again: another merge with tvm/unity to grab the latest improvements.
2 parents 41a2e4f + cfa67c9 commit 012c9df

File tree

222 files changed: +11307 / −2164 lines


ci/jenkins/docker-images.ini

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@
 ci_arm: tlcpack/ci-arm:20230223-070143-a3b51f11b
 ci_cortexm: tlcpackstaging/ci_cortexm:20230124-233207-fd3f8035c
 ci_cpu: tlcpack/ci-cpu:relax-20230217-001605-fcb3d9e71
-ci_gpu: tlcpack/ci-gpu:20221128-070141-ae4fd7df7
+ci_gpu: tlcpack/ci-gpu:20230308-070109-9d732d0fa
 ci_hexagon: tlcpack/ci_hexagon:20230127-185848-95fa22308
 ci_i386: tlcpack/ci-i386:20221013-060115-61c9742ea
 ci_lint: tlcpack/ci-lint:20221013-060115-61c9742ea

cmake/modules/contrib/DNNL.cmake

Lines changed: 1 addition & 0 deletions

@@ -52,6 +52,7 @@ elseif(USE_DNNL STREQUAL "C_SRC")
   find_library(EXTERN_LIBRARY_DNNL dnnl)
   list(APPEND TVM_RUNTIME_LINKER_LIBS ${EXTERN_LIBRARY_DNNL})
   tvm_file_glob(GLOB DNNL_CONTRIB_SRC src/runtime/contrib/dnnl/dnnl.cc
+                src/runtime/contrib/dnnl/dnnl_utils.cc
                 src/runtime/contrib/cblas/dnnl_blas.cc)
   list(APPEND RUNTIME_SRCS ${DNNL_CONTRIB_SRC})
   message(STATUS "Build with DNNL C source module: " ${EXTERN_LIBRARY_DNNL})

conda/recipe/meta.yaml

Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@
 # specific language governing permissions and limitations
 # under the License.

-{% set version = '0.11.dev0' %}
+{% set version = '0.12.dev0' %}
 {% set pkg_name = 'tvm' %}
 {% set cuda_tag = cuda_version | replace('.', '') %}  # [cuda]
 {% set pkg_name = pkg_name + '-cu' + cuda_tag %}  # [cuda]

docker/Dockerfile.ci_arm

Lines changed: 2 additions & 2 deletions

@@ -50,8 +50,8 @@ COPY install/ubuntu_install_sccache.sh /install/ubuntu_install_sccache.sh
 RUN bash /install/ubuntu_install_sccache.sh
 ENV PATH /opt/sccache:$PATH

-COPY install/ubuntu_install_llvm.sh /install/ubuntu_install_llvm.sh
-RUN bash /install/ubuntu_install_llvm.sh
+COPY install/ubuntu_install_llvm_from_source.sh /install/ubuntu_install_llvm_from_source.sh
+RUN bash /install/ubuntu_install_llvm_from_source.sh 15.0.7 8b5fcb24b4128cf04df1b0b9410ce8b1a729cb3c544e6da885d234280dedeac6

 ENV TVM_VENV /venv/apache-tvm-py3.7
 COPY python/bootstrap/lockfiles /install/python/bootstrap/lockfiles
docker/install/ubuntu_install_llvm_from_source.sh (new file; name inferred from the Dockerfile.ci_arm change above)

Lines changed: 100 additions & 0 deletions

@@ -0,0 +1,100 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This script builds LLVM and clang from the llvm-project tarball
+# using CMake. It is tested with LLVM from version 15.
+
+set -e
+
+LLVM_VERSION=$1
+LLVM_FILE_SHA=$2
+
+echo ${LLVM_VERSION}
+
+tmpdir=$(mktemp -d)
+
+cleanup()
+{
+  rm -rf "$tmpdir"
+}
+
+trap cleanup 0
+
+pushd "$tmpdir"
+
+curl -sL \
+  https://github.com/llvm/llvm-project/releases/download/llvmorg-${LLVM_VERSION}/llvm-project-${LLVM_VERSION}.src.tar.xz \
+  -o llvm-project-${LLVM_VERSION}.src.tar.xz
+echo "$LLVM_FILE_SHA llvm-project-${LLVM_VERSION}.src.tar.xz" | sha256sum --check
+tar xf llvm-project-${LLVM_VERSION}.src.tar.xz
+pushd llvm-project-${LLVM_VERSION}.src
+
+pushd llvm
+mkdir build
+pushd build
+cmake \
+  -DCMAKE_BUILD_TYPE=Release \
+  -DCMAKE_MODULE_PATH="/llvm-project-${LLVM_VERSION}.src/cmake/Modules" \
+  -DCMAKE_INSTALL_PREFIX=/usr \
+  -DLLVM_TARGETS_TO_BUILD="AArch64;ARM;X86" \
+  -DLLVM_INCLUDE_DOCS=OFF \
+  -DLLVM_INCLUDE_EXAMPLES=OFF \
+  -DLLVM_INCLUDE_TESTS=OFF \
+  -DLLVM_INCLUDE_UTILS=OFF \
+  -DLLVM_ENABLE_TERMINFO=OFF \
+  -DLLVM_ENABLE_ASSERTIONS=ON \
+  -DLLVM_ENABLE_RTTI=ON \
+  -DLLVM_ENABLE_OCAMLDOC=OFF \
+  -DLLVM_USE_INTEL_JITEVENTS=ON \
+  -DLLVM_TEMPORARILY_ALLOW_OLD_TOOLCHAIN=ON \
+  -DPYTHON_EXECUTABLE="$(cpython_path 3.7)/bin/python" \
+  -GNinja \
+  ..
+ninja install
+popd
+popd
+
+# clang is only used to precompile Gandiva bitcode
+if [ ${LLVM_VERSION_MAJOR} -lt 9 ]; then
+  clang_package_name=cfe
+else
+  clang_package_name=clang
+fi
+
+pushd ${clang_package_name}
+mkdir build
+pushd build
+cmake \
+  -DCMAKE_BUILD_TYPE=Release \
+  -DCMAKE_INSTALL_PREFIX=/usr \
+  -DCMAKE_MODULE_PATH="/llvm-project-${LLVM_VERSION}.src/cmake/Modules" \
+  -DCLANG_INCLUDE_TESTS=OFF \
+  -DCLANG_INCLUDE_DOCS=OFF \
+  -DLLVM_INCLUDE_TESTS=OFF \
+  -DLLVM_INCLUDE_DOCS=OFF \
+  -Wno-dev \
+  -GNinja \
+  ..
+
+ninja -w dupbuild=warn install # both clang and llvm builds generate llvm-config file
+popd
+popd
+
+# out of llvm-project-${LLVM_VERSION}.src
+popd
+popd

docker/install/ubuntu_install_paddle.sh

Lines changed: 1 addition & 1 deletion

@@ -20,4 +20,4 @@ set -e
 set -u
 set -o pipefail

-pip install paddlepaddle==2.4.1
+pip install paddlepaddle==2.4.2

include/tvm/node/reflection.h

Lines changed: 1 addition & 1 deletion

@@ -244,7 +244,7 @@ class ReflectionVTable::Registry {
  *   static constexpr const std::nullptr_t VisitAttrs = nullptr;
  *
  *   static void SHashReduce(const runtime::StringObj* key, SHashReducer hash_reduce) {
- *     hash_reduce->SHashReduceHashedValue(runtime::String::HashBytes(key->data, key->size));
+ *     hash_reduce->SHashReduceHashedValue(runtime::String::StableHashBytes(key->data, key->size));
  *   }
  *
  *   static bool SEqualReduce(const runtime::StringObj* lhs,
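The docstring example now routes the string's bytes through runtime::String::StableHashBytes, i.e. a hash that depends only on the byte contents rather than on the platform's std::hash. The sketch below uses a toy FNV-1a hash to illustrate what byte-stability means; it is not TVM's actual StableHashBytes implementation, and ToyStableHashBytes is a made-up name for this illustration.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <string>

// Toy byte-stable hash (FNV-1a): the result is a pure function of the bytes,
// so it is identical across platforms, compilers, and standard libraries,
// unlike std::hash<std::string>.
uint64_t ToyStableHashBytes(const char* data, size_t size) {
  uint64_t hash = 14695981039346656037ull;  // FNV offset basis
  for (size_t i = 0; i < size; ++i) {
    hash ^= static_cast<unsigned char>(data[i]);
    hash *= 1099511628211ull;               // FNV prime
  }
  return hash;
}

int main() {
  std::string key = "relax.attrs.RepeatAttrs";
  std::printf("%llu\n",
              static_cast<unsigned long long>(ToyStableHashBytes(key.data(), key.size())));
  return 0;
}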

include/tvm/node/script_printer.h

Lines changed: 10 additions & 0 deletions

@@ -45,6 +45,11 @@ class PrinterConfigNode : public Object {
   std::string tir_prefix = "T";
   /*! \brief The prefix of Relax nodes */
   std::string relax_prefix = "R";
+  /*!
+   * \brief The alias of the current module at cross-function call
+   * \note Directly use module name if it's empty.
+   */
+  std::string module_alias = "cls";
   /*! \brief Default data type of TIR buffer */
   DataType buffer_dtype = DataType::Float(32);
   /*! \brief Default data type of integer literals */
@@ -78,6 +83,9 @@ class PrinterConfigNode : public Object {
     v->Visit("binding_names", &binding_names);
     v->Visit("show_meta", &show_meta);
     v->Visit("ir_prefix", &ir_prefix);
+    v->Visit("tir_prefix", &tir_prefix);
+    v->Visit("relax_prefix", &relax_prefix);
+    v->Visit("module_alias", &module_alias);
     v->Visit("buffer_dtype", &buffer_dtype);
     v->Visit("int_dtype", &int_dtype);
     v->Visit("float_dtype", &float_dtype);
@@ -92,6 +100,8 @@ class PrinterConfigNode : public Object {
     v->Visit("obj_to_annotate", &obj_to_annotate);
   }

+  Array<String> GetBuiltinKeywords();
+
   static constexpr const char* _type_key = "node.PrinterConfig";
   TVM_DECLARE_FINAL_OBJECT_INFO(PrinterConfigNode, Object);
 };
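The three new v->Visit(...) calls register tir_prefix, relax_prefix, and the new module_alias field with the node reflection machinery, which is what makes them readable and settable by name (for example over the FFI) rather than only from C++. The standalone sketch below mimics that visitor pattern with toy types; FieldPrinter and ToyPrinterConfig are illustrative names, not TVM's actual AttrVisitor or PrinterConfigNode.

#include <iostream>
#include <string>

// Toy stand-in for an attribute visitor: the config node hands each field,
// together with its name, to whatever visitor is passed in. Fields that are
// never passed to Visit() stay invisible to reflection-based tooling.
struct FieldPrinter {
  template <typename T>
  void Visit(const char* name, T* value) {
    std::cout << name << " = " << *value << "\n";
  }
};

struct ToyPrinterConfig {
  std::string ir_prefix = "I";
  std::string tir_prefix = "T";
  std::string relax_prefix = "R";
  std::string module_alias = "cls";  // mirrors the newly added field

  template <typename Visitor>
  void VisitAttrs(Visitor* v) {
    v->Visit("ir_prefix", &ir_prefix);
    v->Visit("tir_prefix", &tir_prefix);      // without these registrations the
    v->Visit("relax_prefix", &relax_prefix);  // fields would not be reachable by name
    v->Visit("module_alias", &module_alias);
  }
};

int main() {
  ToyPrinterConfig config;
  FieldPrinter printer;
  config.VisitAttrs(&printer);  // prints every registered field by name
  return 0;
}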

include/tvm/node/structural_hash.h

Lines changed: 41 additions & 33 deletions

@@ -36,52 +36,60 @@ namespace tvm {
  * \brief Hash definition of base value classes.
  */
 class BaseValueHash {
- public:
-  size_t operator()(const double& key) const { return std::hash<double>()(key); }
-
-  size_t operator()(const int64_t& key) const { return std::hash<int64_t>()(key); }
-
-  size_t operator()(const uint64_t& key) const { return std::hash<uint64_t>()(key); }
-
-  size_t operator()(const int& key) const { return std::hash<int>()(key); }
-
-  size_t operator()(const bool& key) const { return std::hash<bool>()(key); }
-
-  size_t operator()(const std::string& key) const { return std::hash<std::string>()(key); }
-
-  size_t operator()(const runtime::DataType& key) const {
-    return std::hash<int32_t>()(static_cast<int32_t>(key.code()) |
-                                (static_cast<int32_t>(key.bits()) << 8) |
-                                (static_cast<int32_t>(key.lanes()) << 16));
+ protected:
+  template <typename T, typename U>
+  uint64_t Reinterpret(T value) const {
+    union Union {
+      T a;
+      U b;
+    } u;
+    static_assert(sizeof(Union) == sizeof(T), "sizeof(Union) != sizeof(T)");
+    static_assert(sizeof(Union) == sizeof(U), "sizeof(Union) != sizeof(U)");
+    u.b = 0;
+    u.a = value;
+    return u.b;
   }

+ public:
+  uint64_t operator()(const float& key) const { return Reinterpret<float, uint32_t>(key); }
+  uint64_t operator()(const double& key) const { return Reinterpret<double, uint64_t>(key); }
+  uint64_t operator()(const int64_t& key) const { return Reinterpret<int64_t, uint64_t>(key); }
+  uint64_t operator()(const uint64_t& key) const { return key; }
+  uint64_t operator()(const int& key) const { return Reinterpret<int, uint32_t>(key); }
+  uint64_t operator()(const bool& key) const { return key; }
+  uint64_t operator()(const runtime::DataType& key) const {
+    return Reinterpret<DLDataType, uint32_t>(key);
+  }
   template <typename ENum, typename = typename std::enable_if<std::is_enum<ENum>::value>::type>
-  bool operator()(const ENum& key) const {
-    return std::hash<size_t>()(static_cast<size_t>(key));
+  uint64_t operator()(const ENum& key) const {
+    return Reinterpret<int64_t, uint64_t>(static_cast<int64_t>(key));
+  }
+  uint64_t operator()(const std::string& key) const {
+    return runtime::String::StableHashBytes(key.data(), key.length());
   }
 };

 /*!
- * \brief Content-aware structural hasing.
+ * \brief Content-aware structural hashing.
  *
  * The structural hash value is recursively defined in the DAG of IRNodes.
  * There are two kinds of nodes:
  *
  * - Normal node: the hash value is defined by its content and type only.
  * - Graph node: each graph node will be assigned a unique index ordered by the
- *   first occurence during the visit. The hash value of a graph node is
+ *   first occurrence during the visit. The hash value of a graph node is
  *   combined from the hash values of its contents and the index.
  */
 class StructuralHash : public BaseValueHash {
  public:
-  // inheritate operator()
+  // inherit operator()
   using BaseValueHash::operator();
   /*!
    * \brief Compute structural hashing value for an object.
    * \param key The left operand.
    * \return The hash value.
    */
-  TVM_DLL size_t operator()(const ObjectRef& key) const;
+  TVM_DLL uint64_t operator()(const ObjectRef& key) const;
 };

 /*!
@@ -109,23 +117,23 @@ class SHashReducer {
    *
    * \param hashed_value The hashed value
    */
-  virtual void SHashReduceHashedValue(size_t hashed_value) = 0;
+  virtual void SHashReduceHashedValue(uint64_t hashed_value) = 0;
   /*!
    * \brief Append hash value of key to the current sequence of hashes.
    *
    * \param key The object to compute hash from.
-   * \param map_free_vars Whether to map free variables by their occurence number.
+   * \param map_free_vars Whether to map free variables by their occurrence number.
    */
   virtual void SHashReduce(const ObjectRef& key, bool map_free_vars) = 0;
   /*!
-   * \brief Apppend a hash value of free variable to the current sequence of hashes.
+   * \brief Append a hash value of free variable to the current sequence of hashes.
    *
    * \param var The var of interest.
-   * \param map_free_vars Whether to map free variables by their occurence number.
+   * \param map_free_vars Whether to map free variables by their occurrence number.
    *
    * \note If map_free_vars is set to be true,
    *       internally the handler can maintain a counter to encode free variables
-   *       by their order of occurence. This helps to resolve variable
+   *       by their order of occurrence. This helps to resolve variable
    *       mapping of function parameters and let binding variables.
    *
    *       If map_free_vars is set to be false, the address of the variable will be used.
@@ -139,7 +147,7 @@ class SHashReducer {
    *
    * \return Whether there is already a pre-computed hash value.
    */
-  virtual bool LookupHashedValue(const ObjectRef& key, size_t* hashed_value) = 0;
+  virtual bool LookupHashedValue(const ObjectRef& key, uint64_t* hashed_value) = 0;
   /*!
    * \brief Mark current comparison as graph node in hashing.
    *        Graph node hash will depends on the graph structure.
@@ -193,7 +201,7 @@ class SHashReducer {
   /*! \brief Internal class pointer. */
   Handler* handler_;
   /*!
-   * \brief Whether or not to map free variables by their occurence
+   * \brief Whether or not to map free variables by their occurrence
    *        If the flag is false, then free variables will be mapped
    *        by their in-memory address.
    */
@@ -210,10 +218,10 @@ class SHashHandlerDefault : public SHashReducer::Handler {
   SHashHandlerDefault();
   virtual ~SHashHandlerDefault();

-  void SHashReduceHashedValue(size_t hashed_value) override;
+  void SHashReduceHashedValue(uint64_t hashed_value) override;
   void SHashReduce(const ObjectRef& key, bool map_free_vars) override;
   void SHashReduceFreeVar(const runtime::Object* var, bool map_free_vars) override;
-  bool LookupHashedValue(const ObjectRef& key, size_t* hashed_value) override;
+  bool LookupHashedValue(const ObjectRef& key, uint64_t* hashed_value) override;
   void MarkGraphNode() override;

   /*!
@@ -222,7 +230,7 @@ class SHashHandlerDefault : public SHashReducer::Handler {
    * \param map_free_vars Whether or not to remap variables if possible.
    * \return The hash result.
    */
-  virtual size_t Hash(const ObjectRef& object, bool map_free_vars);
+  virtual uint64_t Hash(const ObjectRef& object, bool map_free_vars);

  protected:
   /*!
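The new protected Reinterpret<T, U> helper replaces std::hash with a raw bit reinterpretation, so a given float, double, or DataType always hashes to the same 64-bit value regardless of platform or standard-library implementation. Below is a minimal standalone sketch of the same idea; BitCast is our illustrative name, and it uses std::memcpy instead of the union in the committed code, which is the more conventional standard-C++ way to express the same bit copy.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Toy version of the bit reinterpretation behind BaseValueHash::Reinterpret.
// Copying the value's raw bits into an unsigned integer makes the "hash" of a
// float/double deterministic across platforms, unlike std::hash<double>.
template <typename T, typename U>
U BitCast(T value) {
  static_assert(sizeof(T) == sizeof(U), "source and target must have the same size");
  U out = 0;
  std::memcpy(&out, &value, sizeof(T));  // memcpy avoids union type-punning concerns
  return out;
}

int main() {
  double key = 3.14;
  uint64_t hashed = BitCast<double, uint64_t>(key);  // the double's bits, as uint64_t
  uint32_t small = BitCast<float, uint32_t>(2.5f);   // 32-bit values would widen later
  std::printf("%llu %u\n", static_cast<unsigned long long>(hashed),
              static_cast<unsigned>(small));
  return 0;
}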

include/tvm/relax/attrs/manipulate.h

Lines changed: 38 additions & 0 deletions

@@ -102,6 +102,44 @@ struct SqueezeAttrs : public tvm::AttrsNode<SqueezeAttrs> {
   }
 };  // struct SqueezeAttrs

+/*! \brief Attributes used in repeat operators */
+struct RepeatAttrs : public tvm::AttrsNode<RepeatAttrs> {
+  int repeats;
+  Optional<Integer> axis;
+
+  TVM_DECLARE_ATTRS(RepeatAttrs, "relax.attrs.RepeatAttrs") {
+    TVM_ATTR_FIELD(repeats).describe("The number of repetitions.");
+    TVM_ATTR_FIELD(axis).describe(
+        "The axis along which to repeat values. The negative numbers are interpreted "
+        "counting from the backward. By default, use the flattened input array, and "
+        "return a flat output array.");
+  }
+};  // struct RepeatAttrs
+
+/*! \brief Attributes used in tile operators */
+struct TileAttrs : public tvm::AttrsNode<TileAttrs> {
+  Array<Integer> repeats;
+
+  TVM_DECLARE_ATTRS(TileAttrs, "relax.attrs.TileAttrs") {
+    TVM_ATTR_FIELD(repeats).describe("The number of repetitions of data along each axis.");
+  }
+};  // struct TileAttrs
+
+/*! \brief Attributes used in cumsum operators */
+struct CumsumAttrs : public tvm::AttrsNode<CumsumAttrs> {
+  Optional<Integer> axis;
+  DataType dtype;
+
+  TVM_DECLARE_ATTRS(CumsumAttrs, "relax.attrs.CumsumAttrs") {
+    TVM_ATTR_FIELD(axis).describe(
+        "Axis along which the cumulative sum is computed."
+        "The default (None) is to compute the cumsum over the flattened array.");
+    TVM_ATTR_FIELD(dtype).describe(
+        "Type of the returned array and of the accumulator in which the elements are summed."
+        "If dtype is not specified, it defaults to the dtype of data.");
+  }
+};  // struct CumsumAttrs
+
 }  // namespace relax
 }  // namespace tvm
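The describe() strings above document numpy-style semantics: when no axis is given, repeat and cumsum operate over the flattened input. As a plain illustration of those semantics (ordinary C++ over std::vector, not TVM code; RepeatFlat and CumsumFlat are names made up for this sketch):

#include <cstdio>
#include <vector>

// Illustrates the flattened-input behavior documented by RepeatAttrs and CumsumAttrs.
std::vector<int> RepeatFlat(const std::vector<int>& data, int repeats) {
  std::vector<int> out;
  for (int v : data)
    for (int r = 0; r < repeats; ++r) out.push_back(v);  // each element repeated in place
  return out;
}

std::vector<int> CumsumFlat(const std::vector<int>& data) {
  std::vector<int> out;
  int running = 0;
  for (int v : data) {
    running += v;  // accumulator dtype defaults to the input dtype
    out.push_back(running);
  }
  return out;
}

int main() {
  std::vector<int> x = {1, 2, 3};
  for (int v : RepeatFlat(x, 2)) std::printf("%d ", v);  // 1 1 2 2 3 3
  std::printf("\n");
  for (int v : CumsumFlat(x)) std::printf("%d ", v);     // 1 3 6
  std::printf("\n");
  return 0;
}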
