
Commit 61af63c

fix lint
1 parent 43bd750 commit 61af63c

2 files changed: +21 -17 lines changed


python/tvm/auto_scheduler/auto_schedule.py

Lines changed: 2 additions & 0 deletions

@@ -159,6 +159,7 @@ def __init__(
             measure_callbacks,
         )

+
 def create_task(func, args, target, target_host=None, hardware_params=None):
     """Create a search task

@@ -184,6 +185,7 @@ def create_task(func, args, target, target_host=None, hardware_params=None):
     dag = ComputeDAG(workload_key)
     return SearchTask(dag, workload_key, target, target_host, hardware_params)

+
 def auto_schedule(task, search_policy=None, tuning_options=TuningOptions()):
     """Run auto scheduling search for a task


tutorials/auto_scheduler/tune_matmul_x86.py

Lines changed: 19 additions & 17 deletions

@@ -41,25 +41,27 @@
 # The function should return the list of input/output tensors.
 # From these tensors, the auto-scheduler can get the whole computational graph.

+
 @auto_scheduler.register_workload
 def matmul_add(N, L, M, dtype):
-    A = te.placeholder((N, L), name='A', dtype=dtype)
-    B = te.placeholder((L, M), name='B', dtype=dtype)
-    C = te.placeholder((N, M), name='C', dtype=dtype)
+    A = te.placeholder((N, L), name="A", dtype=dtype)
+    B = te.placeholder((L, M), name="B", dtype=dtype)
+    C = te.placeholder((N, M), name="C", dtype=dtype)

-    k = te.reduce_axis((0, L), name='k')
-    matmul = te.compute((N, M), lambda i, j: te.sum(A[i, k] * B[k, j], axis=k), name='matmul')
-    out = te.compute((N, M), lambda i, j: matmul[i, j] + C[i, j], name='D')
+    k = te.reduce_axis((0, L), name="k")
+    matmul = te.compute((N, M), lambda i, j: te.sum(A[i, k] * B[k, j], axis=k), name="matmul")
+    out = te.compute((N, M), lambda i, j: matmul[i, j] + C[i, j], name="D")

     return [A, B, C, out]

+
 ######################################################################
 # Create the search task
 # ^^^^^^^^^^^^^^^^^^^^^^
-# We then create the a search task with N=L=M=128 and dtype='float32'
+# We then create the a search task with N=L=M=128 and dtype="float32"

 target = tvm.target.Target("llvm")
-task = auto_scheduler.create_task(matmul_add, (128, 128, 128, 'float32'), target)
+task = auto_scheduler.create_task(matmul_add, (128, 128, 128, "float32"), target)

 # inspect the computational graph
 print(task.compute_dag)
@@ -68,15 +70,16 @@ def matmul_add(N, L, M, dtype):
 # Next, we set parameters for the auto-scheduler.
 #
 # * `num_measure_trials` is the number of measurement trials we can use during the search.
-#   We only make 10 trials in this tutorial for a fast demonstration. In practice, 1000 is a
+#   We only make 10 trials in this tutorial for a fast demonstration. In practice, 1000 is a
 #   good value for the search to converge. You can do more trials according to your time budget.
 # * In addition, we use `RecordToFile` to dump measurement records into a file `matmul.json`.
 #   The measurement records can be used to query the history best, resume the search,
 #   or do more analysis later.
 # * see :any:`auto_schedule.TuningOptions`: for more parameters

-tune_option = auto_scheduler.TuningOptions(num_measure_trials=100,
-                                           measure_callbacks=[auto_scheduler.RecordToFile('matmul.json')])
+tune_option = auto_scheduler.TuningOptions(
+    num_measure_trials=10, measure_callbacks=[auto_scheduler.RecordToFile('matmul.json')]
+)

 ######################################################################
 # Run the search
@@ -85,8 +88,7 @@ def matmul_add(N, L, M, dtype):
 # We can kick off the search and let the auto-scheduler do its magic.
 # After some measurement trials, it will return the best schedule it founds.

-sch, args = auto_scheduler.auto_schedule(task,
-                                         tuning_options=tune_option)
+sch, args = auto_scheduler.auto_schedule(task, tuning_options=tune_option)

 ######################################################################
 # We can lower schedule to see the IR after auto-scheduling.
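
The last context line above refers to lowering the tuned schedule to inspect the IR; that step lies outside the changed lines of this commit. A minimal hedged sketch, assuming the `sch` and `args` returned by `auto_schedule` above and the standard `tvm.lower` helper:

print(tvm.lower(sch, args, simple_mode=True))
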
@@ -113,19 +115,19 @@ def matmul_add(N, L, M, dtype):
 # Using the record file
 # ^^^^^^^^^^^^^^^^^^^^^
 # During the search, all measuremnt records is dumpped into the record
-# file 'matmul.json'. The measurement records can be used to resume the
+# file "matmul.json". The measurement records can be used to resume the
 # search, re-apply search results and other analysis.
 #
 # Here we show an example where we load the best schedule from a file,
 # print the equivalent python schedule API, and build the binary again.

-inp, res = auto_scheduler.load_best('matmul.json', task.workload_key)
+inp, res = auto_scheduler.load_best("matmul.json", task.workload_key)

 # Print equivalent python schedule API. This can be used for debugging and
-# learn the behavior of auto-scheduler.
+# learning the behavior of auto-scheduler.
 print(task.compute_dag.print_python_code_from_state(inp.state))

-# Rebuild the binary. This shows how you can apply the best schedule from a
+# Rebuild the binary. This shows how you can apply the best schedule from a
 # log file without reruning the search again.
 sch, args = task.compute_dag.apply_steps_from_state(inp.state)
 func = tvm.build(sch, args)
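
The diff stops at `tvm.build`; the commit does not show the rebuilt function being executed. As a hedged sketch only (the variable names and the NumPy check below are illustrative, not part of this commit, and the `ctx`/`asnumpy` argument and method names vary across TVM versions), verifying the tuned matmul_add might look roughly like this:

import numpy as np
import tvm

# `func` is the module built from the best schedule in the hunk above.
a_np = np.random.uniform(size=(128, 128)).astype(np.float32)
b_np = np.random.uniform(size=(128, 128)).astype(np.float32)
c_np = np.random.uniform(size=(128, 128)).astype(np.float32)
out_np = a_np.dot(b_np) + c_np  # NumPy reference: matmul plus bias

ctx = tvm.cpu()
a_tvm, b_tvm, c_tvm = [tvm.nd.array(x, ctx) for x in (a_np, b_np, c_np)]
out_tvm = tvm.nd.empty(out_np.shape, ctx=ctx)
func(a_tvm, b_tvm, c_tvm, out_tvm)

# Compare the TVM result against the NumPy reference
np.testing.assert_allclose(out_np, out_tvm.asnumpy(), rtol=1e-3)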
