Commit cfa67c9: Post merge cleanup
Author: Josh Fromm
1 parent: 31ef462

File tree: 8 files changed, +137 -26 lines changed


ci/jenkins/docker-images.ini
Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@
 [jenkins]
 ci_arm: tlcpack/ci-arm:20230223-070143-a3b51f11b
 ci_cortexm: tlcpackstaging/ci_cortexm:20230124-233207-fd3f8035c
-ci_cpu: tlcpack/ci-cpu:20230308-070109-9d732d0fa
+ci_cpu: tlcpack/ci-cpu:relax-20230217-001605-fcb3d9e71
 ci_gpu: tlcpack/ci-gpu:20230308-070109-9d732d0fa
 ci_hexagon: tlcpack/ci_hexagon:20230127-185848-95fa22308
 ci_i386: tlcpack/ci-i386:20221013-060115-61c9742ea

ci/jenkins/unity_jenkinsfile.groovy
Lines changed: 39 additions & 12 deletions

@@ -32,7 +32,7 @@ import org.jenkinsci.plugins.pipeline.modeldefinition.Utils
 // NOTE: these lines are scanned by docker/dev_common.sh. Please update the regex as needed. -->
 ci_lint = 'tlcpack/ci-lint:20221025-182121-e41d0ed6e'
 ci_gpu = 'tlcpack/ci-gpu:20221128-070141-ae4fd7df7'
-ci_cpu = 'tlcpack/ci-cpu:20230110-070003-d00168ffb'
+ci_cpu = 'tlcpackstaging/ci_cpu:relax-20230217-001605-fcb3d9e71'
 ci_wasm = 'tlcpack/ci-wasm:v0.72'
 ci_i386 = 'tlcpack/ci-i386:v0.75'
 ci_qemu = 'tlcpack/ci-qemu:v0.11'

@@ -135,9 +135,9 @@ def should_skip_ci(pr_number) {

 cancel_previous_build()

-def lint() {
+def lint(node_type) {
 stage('Prepare') {
-  node('CPU-SMALL') {
+  node(node_type) {
     // When something is provided in ci_*_param, use it, otherwise default with ci_*
     ci_lint = params.ci_lint_param ?: ci_lint
     ci_cpu = params.ci_cpu_param ?: ci_cpu

@@ -161,10 +161,12 @@ stage('Prepare') {
     """, label: 'Docker image names')
   }
 }
+}

+def sanity_check(node_type) {
 stage('Sanity Check') {
   timeout(time: max_time, unit: 'MINUTES') {
-    node('CPU-SMALL') {
+    node(node_type) {
       ws(per_exec_ws('tvm/sanity')) {
         init_git()
         is_docs_only_build = sh (

@@ -187,8 +189,17 @@ stage('Sanity Check') {
       }
     }
   }
+try {
+  lint('CPU-SMALL-SPOT')
+} catch(Exception ex) {
+  lint('CPU-SMALL')
+}

-lint()
+try {
+  sanity_check('CPU-SPOT')
+} catch(Exception ex) {
+  sanity_check('CPU')
+}

 // Run make. First try to do an incremental make from a previous workspace in hope to
 // accelerate the compilation. If something is wrong, clean the workspace and then

@@ -308,10 +319,8 @@ def add_hexagon_permissions() {
 // NOTE: limit tests to relax folder for now to allow us to skip some of the tests
 // that are mostly related to changes in main.
 // This helps to speedup CI time and reduce CI cost.
-stage('Build and Test') {
-  if (is_docs_only_build != 1) {
-    parallel 'BUILD: GPU': {
-      node('GPU') {
+def build_test_gpu(node_type) {
+  node(node_type) {
     ws(per_exec_ws('tvm/build-gpu')) {
       init_git()
       sh "${docker_run} ${ci_gpu} nvidia-smi"

@@ -320,16 +329,34 @@ stage('Build and Test') {
       sh "${docker_run} ${ci_gpu} ./tests/scripts/unity/task_python_relax_gpuonly.sh"
     }
   }
-},
-'BUILD: CPU': {
-  node('CPU-SMALL') {
+}
+
+def build_test_cpu(node_type) {
+  node(node_type) {
     ws(per_exec_ws('tvm/build-cpu')) {
       init_git()
       sh "${docker_run} ${ci_cpu} ./tests/scripts/task_config_build_cpu.sh build"
       make(ci_cpu, 'build', '-j2')
       sh "${docker_run} ${ci_cpu} ./tests/scripts/unity/task_python_relax.sh"
     }
   }
+}
+
+stage('Build and Test') {
+  if (is_docs_only_build != 1) {
+    parallel 'BUILD: GPU': {
+      try {
+        build_test_gpu('GPU-SPOT')
+      } catch(Exception ex) {
+        build_test_gpu('GPU')
+      }
+    },
+    'BUILD: CPU': {
+      try {
+        build_test_cpu('CPU-SPOT')
+      } catch(Exception ex) {
+        build_test_cpu('CPU')
+      }
     }
   } else {
     Utils.markStageSkippedForConditional('BUILD: CPU')
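
The recurring pattern in this Jenkinsfile refactor is worth calling out: each
stage is now a parameterized function, invoked first on a cheaper spot node and
retried on an on-demand node if the spot attempt fails. A minimal sketch of
that pattern, written in Python for brevity (the helper name and labels are
illustrative, not part of the patch):

    def with_spot_fallback(run_stage, spot_label, on_demand_label):
        """Run a CI stage on a spot node; rerun it on-demand if it fails."""
        try:
            run_stage(spot_label)
        except Exception:
            # A reclaimed spot instance surfaces as a stage failure, so the
            # whole stage is rerun from scratch on a regular node.
            run_stage(on_demand_label)

    # Mirrors: try { lint('CPU-SMALL-SPOT') } catch(Exception ex) { lint('CPU-SMALL') }
    with_spot_fallback(lambda label: print(f"lint on {label}"), "CPU-SMALL-SPOT", "CPU-SMALL")

One tradeoff of the bare catch: a genuine test failure on the spot node also
triggers a full rerun on the fallback node, so real failures pay for the stage
twice.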

python/tvm/relax/frontend/__init__.py
Lines changed: 1 addition & 1 deletion

@@ -17,4 +17,4 @@
 """
 Frontends for constructing Relax programs, with the model importers
 """
-from .common import detach_params
+from .common import detach_params, SpanContext, attach_span, emit_te_with_span

python/tvm/relax/frontend/common.py
Lines changed: 64 additions & 1 deletion

@@ -16,9 +16,11 @@
 # under the License.
 # pylint: disable=invalid-name
 """Commons for Relax frontend."""
-from typing import Dict, List, Tuple
+from typing import Dict, List, Tuple, Union, Callable, Any

 import tvm
+from tvm import relax
+from ...ir import Span, SourceName


 def detach_params(mod: tvm.IRModule) -> Tuple[tvm.IRModule, Dict[str, List[tvm.nd.NDArray]]]:

@@ -53,3 +55,64 @@ def detach_params(mod: tvm.IRModule) -> Tuple[tvm.IRModule, Dict[str, List[tvm.nd.NDArray]]]:
         else:
             detached_mod[gv] = func
     return detached_mod, params_dict
+
+
+def emit_te_with_span(bb, func: Callable, *args: Any, **kwargs: Any) -> relax.Var:
+    """Same as block_builder.emit_te, but attaches a span to the generated call.
+    Uses the current span in the SpanContext.
+    """
+
+    call = bb.call_te(func, *args, **kwargs)
+    call = attach_span(call)
+    return bb.emit(call)
+
+
+def attach_span(op: relax.Call):
+    """Attach a span to a Relax op if it doesn't already have one.
+    Uses the current span in the SpanContext.
+    Parameters
+    ----------
+    op : relax.Expr
+        The op to attach a span to.
+    Returns
+    -------
+    op : relax.Expr
+        The op with a span attached.
+    """
+    assert isinstance(op, relax.Call), "Expected a Call node but got: {op}".format(op=str(type(op)))
+    if op.span is None:
+        return relax.Call(op.op, op.args, op.attrs, op.sinfo_args, SpanContext.current())
+    return op
+
+
+class SpanContext:
+    """A context manager for setting the current Span.
+    Parameters
+    ----------
+    span : Union[Span, str]
+        The span to set as the current span.
+    """

+    __current_span = None
+
+    def __init__(self, span: Union[Span, str]):
+        assert isinstance(span, (Span, str)), "span must be a Span or str"
+        if isinstance(span, str):
+            span = Span(SourceName(span), 0, 0, 0, 0)
+        SpanContext.__current_span = span
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, ptype, value, trace):
+        SpanContext.__current_span = None
+
+    @staticmethod
+    def current():
+        """Get the span in the current context.
+        Returns
+        -------
+        span : Optional[Span]
+            The current span.
+        """
+        return SpanContext.__current_span
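
Taken together, the three additions give frontends a way to stamp source
information onto emitted calls. A hedged usage sketch (the builder setup and
span name are illustrative; only SpanContext and emit_te_with_span come from
this patch):

    from tvm import relax, topi
    from tvm.relax.frontend import SpanContext, emit_te_with_span

    bb = relax.BlockBuilder()
    x = relax.Var("x", relax.TensorStructInfo((2, 3), "float32"))
    with bb.function("main", [x]):
        # Calls emitted inside this context pick up a span named
        # "model/layer0" unless they already carry one.
        with SpanContext("model/layer0"):
            y = emit_te_with_span(bb, topi.exp, x)
        bb.emit_func_output(y)
    mod = bb.get()

Note that __current_span is class-level state: exiting any SpanContext resets
it to None rather than restoring an enclosing context's span, so nested
contexts do not compose.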

python/tvm/relax/transform/legalize_ops/statistical.py
Lines changed: 26 additions & 8 deletions

@@ -51,9 +51,17 @@ def _te_variance(x: te.Tensor, axis: List[tir.IntImm], keepdims: bool) -> te.Tensor:

 @register_legalize("relax.mean")
 def _mean(bb: BlockBuilder, call: Call) -> Expr:
-    return bb.call_te(
-        _te_mean, call.args[0], call.attrs.axis, call.attrs.keepdims, primfunc_name_hint="mean"
+    # Reductions often overflow fp16 values. If we encounter one, just cast to fp32.
+    data = call.args[0]
+    original_dtype = call.args[0].struct_info.dtype
+    if original_dtype == "float16":
+        data = bb.normalize(bb.emit_te(topi.cast, data, "float32"))
+    output = bb.normalize(
+        bb.call_te(_te_mean, data, call.attrs.axis, call.attrs.keepdims, primfunc_name_hint="mean")
     )
+    if output.struct_info.dtype != original_dtype:
+        output = bb.emit_te(topi.cast, output, original_dtype)
+    return output


 @register_legalize("relax.std")

@@ -68,13 +76,23 @@ def te_std(x: te.Tensor, axis: List[tir.IntImm], keepdims: bool) -> te.Tensor:

 @register_legalize("relax.variance")
 def _variance(bb: BlockBuilder, call: Call) -> Expr:
-    return bb.call_te(
-        _te_variance,
-        call.args[0],
-        call.attrs.axis,
-        call.attrs.keepdims,
-        primfunc_name_hint="variance",
+    # Reductions often overflow fp16 values. If we encounter one, just cast to fp32.
+    data = call.args[0]
+    original_dtype = call.args[0].struct_info.dtype
+    if original_dtype == "float16":
+        data = bb.normalize(bb.emit_te(topi.cast, data, "float32"))
+    output = bb.normalize(
+        bb.call_te(
+            _te_variance,
+            data,
+            call.attrs.axis,
+            call.attrs.keepdims,
+            primfunc_name_hint="variance",
+        )
     )
+    if output.struct_info.dtype != original_dtype:
+        output = bb.emit_te(topi.cast, output, original_dtype)
+    return output


 register_legalize("relax.max", _statistical(topi.max))
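
The same guard now appears in both legalizers: compute in fp32 when the input
is fp16, then cast the result back so the op's output dtype is unchanged. A
minimal numpy illustration (not from the patch) of the overflow this avoids,
since fp16 tops out around 65504:

    import numpy as np

    x = np.ones(100_000, dtype="float16")
    print(x.sum())                    # inf: fp16 partial sums overflow
    print(x.astype("float32").sum())  # 100000.0: accumulating in fp32 is safe

The final compare-and-cast keeps the fp32 detour invisible to callers:
mean/variance over an fp16 tensor still returns fp16.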

python/tvm/relax/vm_build.py
Lines changed: 2 additions & 1 deletion

@@ -293,7 +293,8 @@ def foo(x: Tensor((3, 4), "float32"), y: Tensor((3, 4), "float32")):
     target = tvm.target.Target(target)

     passes = []
-    passes.append(relax.transform.RewriteDataflowReshape())
+    # TODO(jwfromm) Reenable once slice bug is fixed.
+    # passes.append(relax.transform.RewriteDataflowReshape())
     passes.append(relax.transform.ToNonDataflow())
     passes.append(relax.transform.CallTIRRewrite())
     passes.append(relax.transform.StaticPlanBlockMemory())

src/relax/ir/block_builder.cc
Lines changed: 1 addition & 1 deletion

@@ -574,7 +574,7 @@ class Normalizer : public BlockBuilderImpl, private ExprFunctor<Expr(const Expr&
     if (unchanged) {
       call = GetRef<Call>(op);
     } else {
-      call = Call(new_op, new_args, op->attrs, op->sinfo_args);
+      call = Call(new_op, new_args, op->attrs, op->sinfo_args, op->span);
     }

     if (!call->struct_info_.defined()) {

tests/scripts/unity/task_python_relax.sh
Lines changed: 3 additions & 1 deletion

@@ -29,7 +29,9 @@ export TVM_NUM_THREADS=2
 make cython3

 # Run Relax tests
-TVM_TEST_TARGETS="${TVM_RELAY_TEST_TARGETS:-llvm}" pytest tests/python/relax
+export TVM_TEST_TARGETS="${TVM_RELAY_TEST_TARGETS:-llvm}"
+export PLATFORM=cpu
+run_pytest cython unity-relax tests/python/relax

 # Run Relax examples
 # python3 ./apps/relax_examples/mlp.py
