Change module get_json to get_graph_json, add test
elvin-n committed Aug 25, 2021
1 parent 6103335 commit 33517c9
Showing 3 changed files with 24 additions and 5 deletions.
docs/dev/debugger.rst (7 changes: 3 additions & 4 deletions)
@@ -123,12 +123,12 @@ Example of loading the parameters
 How to use Debugger?
 ***************************************

-1. In ``config.cmake`` set the ``USE_GRAPH_RUNTIME_DEBUG`` flag to ``ON``
+1. In ``config.cmake`` set the ``USE_PROFILER`` flag to ``ON``

 ::

   # Whether enable additional graph debug functions
-  set(USE_GRAPH_RUNTIME_DEBUG ON)
+  set(USE_PROFILER ON)

 2. Do 'make' tvm, so that it will make the ``libtvm_runtime.so``

@@ -154,8 +154,7 @@ How to use Debugger?

 ::
   lib = tvm.runtime.load_module("network.so")
-  m = graph_executor.GraphModuleDebug(lib["debug_create"]("default", dev),
-                                      [dev], lib["get_json"](), dump_root="/tmp/tvmdbg")
+  m = graph_executor.create(lib["get_graph_json"](), lib, dev, dump_root="/tmp/tvmdbg")
   # set inputs
   m.set_input('data', tvm.nd.array(data.astype(dtype)))
   m.set_input(**params)
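For reference, here is a minimal end-to-end sketch of the updated debugger flow documented above. It assumes the debug executor is imported as ``graph_executor`` (e.g. ``from tvm.contrib.debugger import debug_executor as graph_executor``) and that ``network.so``, the ``data`` input name, and the input shape come from the user's own model; these are illustrative assumptions, not part of the commit:

    # Sketch only: module path, input name, and shape are assumed, not taken from the commit.
    import numpy as np
    import tvm
    from tvm.contrib.debugger import debug_executor as graph_executor

    dev = tvm.cpu(0)
    lib = tvm.runtime.load_module("network.so")  # previously exported graph executor factory
    # The graph JSON is now fetched through the renamed "get_graph_json" entry point.
    m = graph_executor.create(lib["get_graph_json"](), lib, dev, dump_root="/tmp/tvmdbg")
    m.set_input("data", tvm.nd.array(np.random.rand(1, 3, 224, 224).astype("float32")))
    m.run()
    out = m.get_output(0).asnumpy()  # per-operator dumps are written under /tmp/tvmdbg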
src/runtime/graph_executor/graph_executor_factory.cc (3 changes: 2 additions & 1 deletion)
@@ -53,9 +53,10 @@ PackedFunc GraphExecutorFactory::GetFunction(
       }
       *rv = this->ExecutorCreate(devices);
     });
-  } else if (name == "get_json") {
+  } else if (name == "get_graph_json") {
     return PackedFunc(
         [sptr_to_self, this](TVMArgs args, TVMRetValue* rv) { *rv = this->graph_json_; });
+
   } else if (name == "debug_create") {
     return PackedFunc([sptr_to_self, this](TVMArgs args, TVMRetValue* rv) {
       ICHECK_GE(args.size(), 2);
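Since ``GraphExecutorFactory::GetFunction`` dispatches on the packed-function name, any caller that previously looked up ``get_json`` has to switch to the new key. A small sketch of the lookup from Python, assuming a factory module already exported to ``deploy_lib.so`` (the path is illustrative):

    import tvm

    lib = tvm.runtime.load_module("deploy_lib.so")  # assumed export of a graph executor factory
    graph_json = lib["get_graph_json"]()            # the old "get_json" key no longer resolves
    print(graph_json[:120])                         # serialized execution graph, returned as a string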
tests/python/unittest/test_runtime_module_based_interface.py (19 changes: 19 additions & 0 deletions)
@@ -90,6 +90,24 @@ def test_cpu():
     tvm.testing.assert_allclose(out, verify(data), atol=1e-5)


+def test_cpu_get_graph_json():
+    if not tvm.testing.device_enabled("llvm"):
+        print("Skip because llvm is not enabled")
+        return
+    mod, params = relay.testing.synthetic.get_workload()
+    with relay.build_config(opt_level=3):
+        complied_graph_lib = relay.build_module.build(mod, "llvm", params=params)
+    from tvm.contrib import utils
+    temp = utils.tempdir()
+    file_name = "deploy_lib.so"
+    path_lib = temp.relpath(file_name)
+    complied_graph_lib.export_library(path_lib)
+    loaded_lib = tvm.runtime.load_module(path_lib)
+    json = loaded_lib["get_graph_json"]()
+    assert(isinstance(json, str) == True)
+    assert(json.find("tvmgen_default_fused_nn_softmax1") == 6312)
+
+
 @tvm.testing.requires_cuda
 @tvm.testing.requires_gpu
 def test_gpu():
@@ -619,3 +637,4 @@ def make_module(mod):
     test_remove_package_params()
     test_debug_graph_executor()
     test_multiple_imported_modules()
+    test_cpu_get_graph_json()
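Beyond the fixed string offset checked in the new test, the returned value is ordinary graph-executor JSON, so it can also be inspected structurally. A sketch reusing ``loaded_lib`` from the test above; the node-name check is an illustrative assumption (the graph built from the synthetic workload contains fused softmax nodes, as the offset assertion implies), not part of the commit:

    import json

    graph = json.loads(loaded_lib["get_graph_json"]())
    node_names = [node["name"] for node in graph["nodes"]]
    # Expect at least one fused softmax node in the synthetic workload's graph.
    assert any("softmax" in name for name in node_names)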
