[MetaSchedule][Minor] Add Describe Function For Tuning Scripts (apache#11754)

This PR is based on apache#11751 and adds a `describe()` call to the `tune_onnx`, `tune_relay`, and `tune_te` testing scripts for both AutoScheduler and MetaSchedule. It prints information that is useful for reproducibility, for example:
```
Python Environment
  TVM version    = 0.9.dev0
  Python version = 3.8.8 (default, Apr 13 2021, 19:58:26)  [GCC 7.3.0] (64 bit)
  os.uname()     = Linux 5.15.5-76051505-generic #202111250933~1638201579~21.04~09f1aa7-Ubuntu SMP Tue Nov 30 02: x86_64
CMake Options:
  {
    "BUILD_STATIC_RUNTIME": "OFF",
    "COMPILER_RT_PATH": "3rdparty/compiler-rt",
    "CUDA_VERSION": "NOT-FOUND",
    "DLPACK_PATH": "3rdparty/dlpack/include",
    "DMLC_PATH": "3rdparty/dmlc-core/include",
    "GIT_COMMIT_HASH": "3b872a0adae07b0cd60248346fd31b158cba630c",
    "GIT_COMMIT_TIME": "2022-06-15 11:27:59 -0700",
    "HIDE_PRIVATE_SYMBOLS": "OFF",
    "INDEX_DEFAULT_I64": "ON",
    "INSTALL_DEV": "OFF",
    "LLVM_VERSION": "11.0.1",
    "PICOJSON_PATH": "3rdparty/picojson",
    "RANG_PATH": "3rdparty/rang/include",
    "ROCM_PATH": "/opt/rocm",
    "SUMMARIZE": "OFF",
    "TVM_CXX_COMPILER_PATH": "/usr/lib/ccache/c++",
    "USE_ALTERNATIVE_LINKER": "AUTO",
    "USE_AOT_EXECUTOR": "ON",
    "USE_ARM_COMPUTE_LIB": "OFF",
    "USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR": "OFF",
    "USE_BLAS": "none",
    "USE_BNNS": "OFF",
    "USE_BYODT_POSIT": "OFF",
    "USE_CLML": "OFF",
    "USE_CLML_GRAPH_EXECUTOR": "OFF",
    "USE_CMSISNN": "OFF",
    "USE_COREML": "OFF",
    "USE_CPP_RPC": "OFF",
    "USE_CUBLAS": "OFF",
    "USE_CUDA": "/usr/lib/cuda-11.2",
    "USE_CUDNN": "OFF",
    "USE_CUSTOM_LOGGING": "OFF",
    "USE_CUTLASS": "OFF",
    "USE_DNNL": "OFF",
    "USE_ETHOSN": "OFF",
    "USE_FALLBACK_STL_MAP": "OFF",
    "USE_GRAPH_EXECUTOR": "ON",
    "USE_GRAPH_EXECUTOR_CUDA_GRAPH": "OFF",
    "USE_GTEST": "AUTO",
    "USE_HEXAGON": "OFF",
    "USE_HEXAGON_GTEST": "/path/to/hexagon/gtest",
    "USE_HEXAGON_RPC": "OFF",
    "USE_HEXAGON_SDK": "/path/to/sdk",
    "USE_IOS_RPC": "OFF",
    "USE_KHRONOS_SPIRV": "OFF",
    "USE_LIBBACKTRACE": "ON",
    "USE_LIBTORCH": "OFF",
    "USE_LLVM": "llvm-config-11",
    "USE_METAL": "OFF",
    "USE_MICRO": "OFF",
    "USE_MICRO_STANDALONE_RUNTIME": "OFF",
    "USE_MIOPEN": "OFF",
    "USE_MKL": "OFF",
    "USE_MSVC_MT": "OFF",
    "USE_NNPACK": "OFF",
    "USE_OPENCL": "OFF",
    "USE_OPENCL_GTEST": "/path/to/opencl/gtest",
    "USE_OPENMP": "none",
    "USE_PAPI": "OFF",
    "USE_PROFILER": "ON",
    "USE_PT_TVMDSOOP": "OFF",
    "USE_RANDOM": "ON",
    "USE_RELAY_DEBUG": "OFF",
    "USE_ROCBLAS": "OFF",
    "USE_ROCM": "OFF",
    "USE_RPC": "ON",
    "USE_RTTI": "ON",
    "USE_RUST_EXT": "OFF",
    "USE_SORT": "ON",
    "USE_SPIRV_KHR_INTEGER_DOT_PRODUCT": "OFF",
    "USE_STACKVM_RUNTIME": "OFF",
    "USE_TARGET_ONNX": "OFF",
    "USE_TENSORFLOW_PATH": "none",
    "USE_TENSORRT_CODEGEN": "OFF",
    "USE_TENSORRT_RUNTIME": "OFF",
    "USE_TFLITE": "OFF",
    "USE_TF_TVMDSOOP": "OFF",
    "USE_THREADS": "ON",
    "USE_THRUST": "OFF",
    "USE_VITIS_AI": "OFF",
    "USE_VULKAN": "OFF"
  }
```
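
For context, here is a minimal sketch of how a report like the one above can be assembled. It is not the implementation of `tvm.support.describe`; the helper name `describe_environment` is hypothetical, and the use of `tvm.support.libinfo()` as the source of the CMake option dictionary is an assumption.

```python
# Minimal sketch, not the actual tvm.support.describe implementation.
# Assumes tvm.support.libinfo() exposes the CMake options as a str->str mapping.
import json
import os
import sys

import tvm
from tvm.support import libinfo


def describe_environment() -> None:
    """Print the Python/TVM versions, host kernel info, and TVM's CMake options."""
    print("Python Environment")
    print(f"  TVM version    = {tvm.__version__}")
    # sys.version spans two lines; join them and report the pointer width.
    python_version = " ".join(sys.version.splitlines())
    print(f"  Python version = {python_version} ({sys.maxsize.bit_length() + 1} bit)")
    print(f"  os.uname()     = {' '.join(os.uname())}")  # POSIX only
    print("CMake Options:")
    options = {key: str(val) for key, val in libinfo().items()}
    print(f"  {json.dumps(options, sort_keys=True, indent=4)}")


if __name__ == "__main__":
    describe_environment()
```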
zxybazh authored and junrushao committed Jun 18, 2022
1 parent 2708b6c commit 7fc384e
Showing 6 changed files with 17 additions and 2 deletions.
python/tvm/auto_scheduler/testing/tune_onnx.py (2 additions, 0 deletions)

@@ -27,6 +27,7 @@
 from tvm import relay
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.relay.frontend import from_onnx
+from tvm.support import describe


 def _parse_args():
@@ -152,6 +153,7 @@ def main():
     else:
         raise NotImplementedError(f"Unsupported target {ARGS.target}")

+    describe()
     print(f"Workload: {ARGS.model_name}")
     onnx_model = onnx.load(ARGS.onnx_path)
     shape_dict = {}
python/tvm/auto_scheduler/testing/tune_relay.py (4 additions, 1 deletion)

@@ -26,6 +26,7 @@
 from tvm import relay
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.meta_schedule.testing.relay_workload import get_network
+from tvm.support import describe


 def _parse_args():
@@ -149,14 +150,16 @@ def main():
         )
     else:
         raise NotImplementedError(f"Unsupported target {ARGS.target}")
+
+    describe()
+    print(f"Workload: {ARGS.workload}")
     mod, params, (input_name, input_shape, input_dtype) = get_network(
         ARGS.workload,
         ARGS.input_shape,
         cache_dir=ARGS.cache_dir,
     )
     input_info = {input_name: input_shape}
     input_data = {}
-    print(f"Workload: {ARGS.workload}")
     for input_name, input_shape in input_info.items():
         print(f"  input_name: {input_name}")
         print(f"  input_shape: {input_shape}")
python/tvm/auto_scheduler/testing/tune_te.py (3 additions, 0 deletions)

@@ -21,6 +21,7 @@
 import tvm
 from tvm import auto_scheduler
 from tvm.meta_schedule.testing.te_workload import CONFIGS
+from tvm.support import describe


 def _parse_args():
@@ -94,6 +95,8 @@ def _parse_args():


 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     log_file = os.path.join(ARGS.work_dir, f"{ARGS.workload}.json")
     workload_func, params = CONFIGS[ARGS.workload]
     params = params[0]  # type: ignore
python/tvm/meta_schedule/testing/tune_onnx.py (2 additions, 0 deletions)

@@ -25,6 +25,7 @@
 from tvm import meta_schedule as ms
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.relay.frontend import from_onnx
+from tvm.support import describe


 def _parse_args():
@@ -120,6 +121,7 @@ def _parse_args():


 def main():
+    describe()
     print(f"Workload: {ARGS.model_name}")
     onnx_model = onnx.load(ARGS.onnx_path)
     shape_dict = {}
python/tvm/meta_schedule/testing/tune_relay.py (3 additions, 1 deletion)

@@ -24,6 +24,7 @@
 from tvm import meta_schedule as ms
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.meta_schedule.testing.relay_workload import get_network
+from tvm.support import describe


 def _parse_args():
@@ -118,14 +119,15 @@ def _parse_args():


 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     mod, params, (input_name, input_shape, input_dtype) = get_network(
         ARGS.workload,
         ARGS.input_shape,
         cache_dir=ARGS.cache_dir,
     )
     input_info = {input_name: input_shape}
     input_data = {}
-    print(f"Workload: {ARGS.workload}")
     for input_name, input_shape in input_info.items():
         print(f"  input_name: {input_name}")
         print(f"  input_shape: {input_shape}")
python/tvm/meta_schedule/testing/tune_te.py (3 additions, 0 deletions)

@@ -24,6 +24,7 @@
 from tvm import meta_schedule as ms
 from tvm import tir
 from tvm.meta_schedule.testing.te_workload import create_te_workload
+from tvm.support import describe


 def _parse_args():
@@ -107,6 +108,8 @@ def _parse_args():


 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     runner = ms.runner.RPCRunner(
         rpc_config=ARGS.rpc_config,
         evaluator_config=ms.runner.EvaluatorConfig(
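
The same environment report can be generated on its own, without starting a tuning run, by importing the helper that all six scripts now use (assuming a TVM build new enough to ship `tvm.support.describe`, which apache#11751 introduced):

```python
# Reproduce the environment/CMake summary from the PR description directly.
from tvm.support import describe

describe()
```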
