@@ -112,7 +112,7 @@ stage("Sanity Check") {
112
112
node(' CPU' ) {
113
113
ws(per_exec_ws(" tvm/sanity" )) {
114
114
init_git()
115
- sh " ${ docker_run} ${ ci_lint} -- ./tests/scripts/task_lint.sh"
115
+ sh " ${ docker_run} ${ ci_lint} ./tests/scripts/task_lint.sh"
116
116
}
117
117
}
118
118
}
@@ -124,18 +124,18 @@ stage("Sanity Check") {
124
124
def make (docker_type , path , make_flag ) {
125
125
timeout(time : max_time, unit : ' MINUTES' ) {
126
126
try {
127
- sh " ${ docker_run} ${ docker_type} -- ./tests/scripts/task_build.sh ${ path} ${ make_flag} "
127
+ sh " ${ docker_run} ${ docker_type} ./tests/scripts/task_build.sh ${ path} ${ make_flag} "
128
128
// always run cpp test when build
129
- sh " ${ docker_run} ${ docker_type} -- ./tests/scripts/task_cpp_unittest.sh"
129
+ sh " ${ docker_run} ${ docker_type} ./tests/scripts/task_cpp_unittest.sh"
130
130
} catch (hudson.AbortException ae) {
131
131
// script exited due to user abort, directly throw instead of retry
132
132
if (ae. getMessage(). contains(' script returned exit code 143' )) {
133
133
throw ae
134
134
}
135
135
echo ' Incremental compilation failed. Fall back to build from scratch'
136
- sh " ${ docker_run} ${ docker_type} -- ./tests/scripts/task_clean.sh ${ path} "
137
- sh " ${ docker_run} ${ docker_type} -- ./tests/scripts/task_build.sh ${ path} ${ make_flag} "
138
- sh " ${ docker_run} ${ docker_type} -- ./tests/scripts/task_cpp_unittest.sh"
136
+ sh " ${ docker_run} ${ docker_type} ./tests/scripts/task_clean.sh ${ path} "
137
+ sh " ${ docker_run} ${ docker_type} ./tests/scripts/task_build.sh ${ path} ${ make_flag} "
138
+ sh " ${ docker_run} ${ docker_type} ./tests/scripts/task_cpp_unittest.sh"
139
139
}
140
140
}
141
141
}
@@ -164,11 +164,11 @@ stage('Build') {
164
164
node(' GPUBUILD' ) {
165
165
ws(per_exec_ws(" tvm/build-gpu" )) {
166
166
init_git()
167
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_config_build_gpu.sh"
167
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_config_build_gpu.sh"
168
168
make(ci_gpu, ' build' , ' -j2' )
169
169
pack_lib(' gpu' , tvm_multilib)
170
170
// compiler test
171
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_config_build_gpu_vulkan.sh"
171
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_config_build_gpu_vulkan.sh"
172
172
make(ci_gpu, ' build2' , ' -j2' )
173
173
}
174
174
}
@@ -177,18 +177,18 @@ stage('Build') {
177
177
node(' CPU' ) {
178
178
ws(per_exec_ws(" tvm/build-cpu" )) {
179
179
init_git()
180
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_config_build_cpu.sh"
180
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_config_build_cpu.sh"
181
181
make(ci_cpu, ' build' , ' -j2' )
182
182
pack_lib(' cpu' , tvm_multilib)
183
183
timeout(time : max_time, unit : ' MINUTES' ) {
184
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_ci_setup.sh"
185
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_python_unittest.sh"
186
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_python_integration.sh"
187
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_python_vta_fsim.sh"
188
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_python_vta_tsim.sh"
189
- // sh "${docker_run} ${ci_cpu} -- ./tests/scripts/task_golang.sh"
184
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_ci_setup.sh"
185
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_python_unittest.sh"
186
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_python_integration.sh"
187
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_python_vta_fsim.sh"
188
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_python_vta_tsim.sh"
189
+ // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_golang.sh"
190
190
// TODO(@jroesch): need to resolve CI issue will turn back on in follow up patch
191
- // sh "${docker_run} ${ci_cpu} -- ./tests/scripts/task_rust.sh"
191
+ // sh "${docker_run} ${ci_cpu} ./tests/scripts/task_rust.sh"
192
192
junit " build/pytest-results/*.xml"
193
193
}
194
194
}
@@ -198,11 +198,11 @@ stage('Build') {
198
198
node(' CPU' ) {
199
199
ws(per_exec_ws(" tvm/build-wasm" )) {
200
200
init_git()
201
- sh " ${ docker_run} ${ ci_wasm} -- ./tests/scripts/task_config_build_wasm.sh"
201
+ sh " ${ docker_run} ${ ci_wasm} ./tests/scripts/task_config_build_wasm.sh"
202
202
make(ci_wasm, ' build' , ' -j2' )
203
203
timeout(time : max_time, unit : ' MINUTES' ) {
204
- sh " ${ docker_run} ${ ci_wasm} -- ./tests/scripts/task_ci_setup.sh"
205
- sh " ${ docker_run} ${ ci_wasm} -- ./tests/scripts/task_web_wasm.sh"
204
+ sh " ${ docker_run} ${ ci_wasm} ./tests/scripts/task_ci_setup.sh"
205
+ sh " ${ docker_run} ${ ci_wasm} ./tests/scripts/task_web_wasm.sh"
206
206
}
207
207
}
208
208
}
@@ -211,7 +211,7 @@ stage('Build') {
211
211
node(' CPU' ) {
212
212
ws(per_exec_ws(" tvm/build-i386" )) {
213
213
init_git()
214
- sh " ${ docker_run} ${ ci_i386} -- ./tests/scripts/task_config_build_i386.sh"
214
+ sh " ${ docker_run} ${ ci_i386} ./tests/scripts/task_config_build_i386.sh"
215
215
make(ci_i386, ' build' , ' -j2' )
216
216
pack_lib(' i386' , tvm_multilib)
217
217
}
@@ -221,7 +221,7 @@ stage('Build') {
221
221
node(' ARM' ) {
222
222
ws(per_exec_ws(" tvm/build-arm" )) {
223
223
init_git()
224
- sh " ${ docker_run} ${ ci_arm} -- ./tests/scripts/task_config_build_arm.sh"
224
+ sh " ${ docker_run} ${ ci_arm} ./tests/scripts/task_config_build_arm.sh"
225
225
make(ci_arm, ' build' , ' -j4' )
226
226
pack_lib(' arm' , tvm_multilib)
227
227
}
@@ -231,11 +231,11 @@ stage('Build') {
231
231
node(' CPU' ) {
232
232
ws(per_exec_ws(" tvm/build-qemu" )) {
233
233
init_git()
234
- sh " ${ docker_run} ${ ci_qemu} -- ./tests/scripts/task_config_build_qemu.sh"
234
+ sh " ${ docker_run} ${ ci_qemu} ./tests/scripts/task_config_build_qemu.sh"
235
235
make(ci_qemu, ' build' , ' -j2' )
236
236
timeout(time : max_time, unit : ' MINUTES' ) {
237
- sh " ${ docker_run} ${ ci_qemu} -- ./tests/scripts/task_ci_setup.sh"
238
- sh " ${ docker_run} ${ ci_qemu} -- ./tests/scripts/task_python_microtvm.sh"
237
+ sh " ${ docker_run} ${ ci_qemu} ./tests/scripts/task_ci_setup.sh"
238
+ sh " ${ docker_run} ${ ci_qemu} ./tests/scripts/task_python_microtvm.sh"
239
239
junit " build/pytest-results/*.xml"
240
240
}
241
241
}
@@ -250,10 +250,10 @@ stage('Unit Test') {
250
250
init_git()
251
251
unpack_lib(' gpu' , tvm_multilib)
252
252
timeout(time : max_time, unit : ' MINUTES' ) {
253
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_ci_setup.sh"
254
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_sphinx_precheck.sh"
255
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_python_unittest_gpuonly.sh"
256
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_python_integration_gpuonly.sh"
253
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_ci_setup.sh"
254
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_sphinx_precheck.sh"
255
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_python_unittest_gpuonly.sh"
256
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_python_integration_gpuonly.sh"
257
257
junit " build/pytest-results/*.xml"
258
258
}
259
259
}
@@ -265,10 +265,10 @@ stage('Unit Test') {
265
265
init_git()
266
266
unpack_lib(' i386' , tvm_multilib)
267
267
timeout(time : max_time, unit : ' MINUTES' ) {
268
- sh " ${ docker_run} ${ ci_i386} -- ./tests/scripts/task_ci_setup.sh"
269
- sh " ${ docker_run} ${ ci_i386} -- ./tests/scripts/task_python_unittest.sh"
270
- sh " ${ docker_run} ${ ci_i386} -- ./tests/scripts/task_python_integration.sh"
271
- sh " ${ docker_run} ${ ci_i386} -- ./tests/scripts/task_python_vta_fsim.sh"
268
+ sh " ${ docker_run} ${ ci_i386} ./tests/scripts/task_ci_setup.sh"
269
+ sh " ${ docker_run} ${ ci_i386} ./tests/scripts/task_python_unittest.sh"
270
+ sh " ${ docker_run} ${ ci_i386} ./tests/scripts/task_python_integration.sh"
271
+ sh " ${ docker_run} ${ ci_i386} ./tests/scripts/task_python_vta_fsim.sh"
272
272
junit " build/pytest-results/*.xml"
273
273
}
274
274
}
@@ -280,8 +280,8 @@ stage('Unit Test') {
280
280
init_git()
281
281
unpack_lib(' arm' , tvm_multilib)
282
282
timeout(time : max_time, unit : ' MINUTES' ) {
283
- sh " ${ docker_run} ${ ci_arm} -- ./tests/scripts/task_ci_setup.sh"
284
- sh " ${ docker_run} ${ ci_arm} -- ./tests/scripts/task_python_unittest.sh"
283
+ sh " ${ docker_run} ${ ci_arm} ./tests/scripts/task_ci_setup.sh"
284
+ sh " ${ docker_run} ${ ci_arm} ./tests/scripts/task_python_unittest.sh"
285
285
junit " build/pytest-results/*.xml"
286
286
// sh "${docker_run} ${ci_arm} ./tests/scripts/task_python_integration.sh"
287
287
}
@@ -294,8 +294,8 @@ stage('Unit Test') {
294
294
init_git()
295
295
unpack_lib(' gpu' , tvm_multilib)
296
296
timeout(time : max_time, unit : ' MINUTES' ) {
297
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_ci_setup.sh"
298
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_java_unittest.sh"
297
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_ci_setup.sh"
298
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_java_unittest.sh"
299
299
}
300
300
}
301
301
}
@@ -309,8 +309,8 @@ stage('Integration Test') {
309
309
init_git()
310
310
unpack_lib(' gpu' , tvm_multilib)
311
311
timeout(time : max_time, unit : ' MINUTES' ) {
312
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_ci_setup.sh"
313
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_python_topi.sh"
312
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_ci_setup.sh"
313
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_python_topi.sh"
314
314
junit " build/pytest-results/*.xml"
315
315
}
316
316
}
@@ -322,8 +322,8 @@ stage('Integration Test') {
322
322
init_git()
323
323
unpack_lib(' gpu' , tvm_multilib)
324
324
timeout(time : max_time, unit : ' MINUTES' ) {
325
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_ci_setup.sh"
326
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_python_frontend.sh"
325
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_ci_setup.sh"
326
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_python_frontend.sh"
327
327
junit " build/pytest-results/*.xml"
328
328
}
329
329
}
@@ -335,8 +335,8 @@ stage('Integration Test') {
335
335
init_git()
336
336
unpack_lib(' cpu' , tvm_multilib)
337
337
timeout(time : max_time, unit : ' MINUTES' ) {
338
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_ci_setup.sh"
339
- sh " ${ docker_run} ${ ci_cpu} -- ./tests/scripts/task_python_frontend_cpu.sh"
338
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_ci_setup.sh"
339
+ sh " ${ docker_run} ${ ci_cpu} ./tests/scripts/task_python_frontend_cpu.sh"
340
340
junit " build/pytest-results/*.xml"
341
341
}
342
342
}
@@ -348,8 +348,8 @@ stage('Integration Test') {
348
348
init_git()
349
349
unpack_lib(' gpu' , tvm_multilib)
350
350
timeout(time : max_time, unit : ' MINUTES' ) {
351
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_ci_setup.sh"
352
- sh " ${ docker_run} ${ ci_gpu} -- ./tests/scripts/task_python_docs.sh"
351
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_ci_setup.sh"
352
+ sh " ${ docker_run} ${ ci_gpu} ./tests/scripts/task_python_docs.sh"
353
353
}
354
354
pack_lib(' mydocs' , ' docs.tgz' )
355
355
}
@@ -361,13 +361,13 @@ stage('Integration Test') {
361
361
stage('Build packages') {
362
362
parallel 'conda CPU': {
363
363
node('CPU') {
364
- sh "${docker_run} tlcpack/conda-cpu -- ./conda/build_cpu.sh"
364
+ sh "${docker_run} tlcpack/conda-cpu ./conda/build_cpu.sh"
365
365
}
366
366
},
367
367
'conda cuda': {
368
368
node('CPU') {
369
- sh "${docker_run} tlcpack/conda-cuda90 -- ./conda/build_cuda.sh"
370
- sh "${docker_run} tlcpack/conda-cuda100 -- ./conda/build_cuda.sh"
369
+ sh "${docker_run} tlcpack/conda-cuda90 ./conda/build_cuda.sh"
370
+ sh "${docker_run} tlcpack/conda-cuda100 ./conda/build_cuda.sh"
371
371
}
372
372
}
373
373
// Here we could upload the packages to anaconda for releases
0 commit comments