Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Autoscheduler][Sparse] Add sparse dense end to end model tuning support for x86/arm cpu & Some bug fix #7635

Merged
merged 27 commits into from
Mar 30, 2021
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Update
  • Loading branch information
jcf94 committed Mar 24, 2021
commit c03231955471be7508fcbb7b4a54a518f6d28e7a
21 changes: 21 additions & 0 deletions python/tvm/topi/sparse/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,9 @@
# specific language governing permissions and limitations
# under the License.
"""Some utils for Sparse operation."""
import tvm
from tvm import relay
from tvm.relay import data_dep_optimization as ddo


def random_bsr_matrix(m, n, bs_r, bs_c, density, dtype):
Expand Down Expand Up @@ -47,3 +50,21 @@ def random_bsr_matrix(m, n, bs_r, bs_c, density, dtype):
assert s.indices.shape == (num_blocks,)
assert s.indptr.shape == (m // bs_r + 1,)
return s


def random_sparse_dense_params(func, params, density, BS_R, BS_C):
    """Replace dense-op weights in *params* with random block-sparse data.

    For every weight feeding a ``nn.dense`` op in *func*, if its shape is
    divisible by the requested block shape, the weight is overwritten with a
    randomly generated BSR matrix (densified) of the given density so that a
    later ``bsr_dense.convert`` pass can pick it up.

    Parameters
    ----------
    func : relay.Function
        The function whose dense-op weight names are searched.
    params : dict of str to tvm.nd.NDArray
        Original parameters; not modified in place.
    density : float
        Fraction of non-zero blocks in the generated matrices.
    BS_R, BS_C : int
        Block rows / columns of the BSR layout.

    Returns
    -------
    dict of str to tvm.nd.NDArray
        A fresh parameter dict with eligible weights replaced.
    """

    def _clone(param_dict):
        # Round-trip through numpy so the caller's arrays stay untouched.
        return {key: tvm.nd.array(val.asnumpy()) for key, val in param_dict.items()}

    result = _clone(params)
    # NOTE(review): relies on the private helper `_search_dense_op_weight`.
    for weight_var in relay.analysis.sparse_dense._search_dense_op_weight(func):
        key = str(weight_var)
        shape = result[key].shape
        # Only weights that tile evenly into (BS_R, BS_C) blocks are replaced.
        if shape[0] % BS_R != 0 or shape[1] % BS_C != 0:
            continue
        dense_w = random_bsr_matrix(shape[0], shape[1], BS_R, BS_C, density, "float32").todense()
        result[key] = tvm.nd.array(dense_w)
    return result
35 changes: 6 additions & 29 deletions tutorials/auto_scheduler/tune_network_arm.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
import tvm
from tvm import relay, auto_scheduler
from tvm.relay import data_dep_optimization as ddo
from tvm.topi.sparse.utils import random_bsr_matrix
from tvm.topi.sparse.utils import random_sparse_dense_params
import tvm.relay.testing
from tvm.contrib import graph_runtime
from tvm.contrib.utils import tempdir
Expand Down Expand Up @@ -140,37 +140,14 @@ def get_network(name, batch_size, layout="NHWC", dtype="float32", use_sparse=Fal
raise ValueError("Network not found.")

if use_sparse:
# This is a test workload that manually transforms a dense model to sparse
# Check `tutorials/frontend/deploy_sparse.py` for more examples on how to import a
# pretrained model.

def random_sparse_dense_params(func, params, density, BS_R, BS_C):
def deepcopy(param_dic):
ret = {}
for k, v in param_dic.items():
ret[k] = tvm.nd.array(v.asnumpy())
return ret

new_params = deepcopy(params)
dense_weight_names = relay.analysis.sparse_dense._search_dense_op_weight(func)
for item in dense_weight_names:
name = str(item)
shape = new_params[name].shape
if shape[0] % BS_R == 0 and shape[1] % BS_C == 0:
new_w = random_bsr_matrix(
shape[0], shape[1], BS_R, BS_C, density, "float32"
).todense()
new_params[name] = tvm.nd.array(new_w)
return new_params

bs_r = 1
bs_c = 1
sparsity = 0.85

# Currently we only support to conver dense matmul to sparse dense matmul
mod, params = ddo.simplify_fc_transpose.convert(mod["main"], params)
params = random_sparse_dense_params(mod, params, BS_R=bs_r, BS_C=1, density=1 - sparsity)
mod, params = ddo.bsr_dense.convert(mod, params, (bs_r, 1), sparsity_threshold=0.8)

# This is a test workload that manually transforms a dense model to sparse
params = random_sparse_dense_params(mod, params, BS_R=bs_r, BS_C=bs_c, density=1 - sparsity)
    # Currently we only support converting dense matmul to sparse dense matmul
mod, params = ddo.bsr_dense.convert(mod, params, (bs_r, bs_c), sparsity_threshold=0.8)
mod = tvm.IRModule.from_expr(mod)

return mod, params, input_shape, output_shape
Expand Down
35 changes: 6 additions & 29 deletions tutorials/auto_scheduler/tune_network_x86.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@
import tvm
from tvm import relay, auto_scheduler
from tvm.relay import data_dep_optimization as ddo
from tvm.topi.sparse.utils import random_bsr_matrix
from tvm.topi.sparse.utils import random_sparse_dense_params
import tvm.relay.testing
from tvm.contrib import graph_runtime

Expand Down Expand Up @@ -137,37 +137,14 @@ def get_network(name, batch_size, layout="NHWC", dtype="float32", use_sparse=Fal
raise ValueError("Network not found.")

if use_sparse:
# This is a test workload that manually transforms a dense model to sparse
# Check `tutorials/frontend/deploy_sparse.py` for more examples on how to import a
# pretrained model.

def random_sparse_dense_params(func, params, density, BS_R, BS_C):
def deepcopy(param_dic):
ret = {}
for k, v in param_dic.items():
ret[k] = tvm.nd.array(v.asnumpy())
return ret

new_params = deepcopy(params)
dense_weight_names = relay.analysis.sparse_dense._search_dense_op_weight(func)
for item in dense_weight_names:
name = str(item)
shape = new_params[name].shape
if shape[0] % BS_R == 0 and shape[1] % BS_C == 0:
new_w = random_bsr_matrix(
shape[0], shape[1], BS_R, BS_C, density, "float32"
).todense()
new_params[name] = tvm.nd.array(new_w)
return new_params

bs_r = 1
bs_c = 1
sparsity = 0.85

# Currently we only support to conver dense matmul to sparse dense matmul
mod, params = ddo.simplify_fc_transpose.convert(mod["main"], params)
params = random_sparse_dense_params(mod, params, BS_R=bs_r, BS_C=1, density=1 - sparsity)
mod, params = ddo.bsr_dense.convert(mod, params, (bs_r, 1), sparsity_threshold=0.8)

# This is a test workload that manually transforms a dense model to sparse
params = random_sparse_dense_params(mod, params, BS_R=bs_r, BS_C=bs_c, density=1 - sparsity)
    # Currently we only support converting dense matmul to sparse dense matmul
mod, params = ddo.bsr_dense.convert(mod, params, (bs_r, bs_c), sparsity_threshold=0.8)
mod = tvm.IRModule.from_expr(mod)

return mod, params, input_shape, output_shape
Expand Down