From 7fc384e30cfd689e8c64988dbd81ae7679809678 Mon Sep 17 00:00:00 2001
From: Xiyou Zhou
Date: Fri, 17 Jun 2022 22:45:35 -0700
Subject: [PATCH] [MetaSchedule][Minor] Add Describe Function For Tuning Scripts (#11754)

This PR is based on #11751 and adds a `describe()` call to the `tune_relay`,
`tune_onnx`, and `tune_te` testing scripts for both AutoScheduler and
MetaSchedule. It prints information that is very useful for reproducibility,
for example:

```
Python Environment
TVM version = 0.9.dev0
Python version = 3.8.8 (default, Apr 13 2021, 19:58:26) [GCC 7.3.0] (64 bit)
os.uname() = Linux 5.15.5-76051505-generic #202111250933~1638201579~21.04~09f1aa7-Ubuntu SMP Tue Nov 30 02: x86_64
CMake Options:
{
  "BUILD_STATIC_RUNTIME": "OFF",
  "COMPILER_RT_PATH": "3rdparty/compiler-rt",
  "CUDA_VERSION": "NOT-FOUND",
  "DLPACK_PATH": "3rdparty/dlpack/include",
  "DMLC_PATH": "3rdparty/dmlc-core/include",
  "GIT_COMMIT_HASH": "3b872a0adae07b0cd60248346fd31b158cba630c",
  "GIT_COMMIT_TIME": "2022-06-15 11:27:59 -0700",
  "HIDE_PRIVATE_SYMBOLS": "OFF",
  "INDEX_DEFAULT_I64": "ON",
  "INSTALL_DEV": "OFF",
  "LLVM_VERSION": "11.0.1",
  "PICOJSON_PATH": "3rdparty/picojson",
  "RANG_PATH": "3rdparty/rang/include",
  "ROCM_PATH": "/opt/rocm",
  "SUMMARIZE": "OFF",
  "TVM_CXX_COMPILER_PATH": "/usr/lib/ccache/c++",
  "USE_ALTERNATIVE_LINKER": "AUTO",
  "USE_AOT_EXECUTOR": "ON",
  "USE_ARM_COMPUTE_LIB": "OFF",
  "USE_ARM_COMPUTE_LIB_GRAPH_EXECUTOR": "OFF",
  "USE_BLAS": "none",
  "USE_BNNS": "OFF",
  "USE_BYODT_POSIT": "OFF",
  "USE_CLML": "OFF",
  "USE_CLML_GRAPH_EXECUTOR": "OFF",
  "USE_CMSISNN": "OFF",
  "USE_COREML": "OFF",
  "USE_CPP_RPC": "OFF",
  "USE_CUBLAS": "OFF",
  "USE_CUDA": "/usr/lib/cuda-11.2",
  "USE_CUDNN": "OFF",
  "USE_CUSTOM_LOGGING": "OFF",
  "USE_CUTLASS": "OFF",
  "USE_DNNL": "OFF",
  "USE_ETHOSN": "OFF",
  "USE_FALLBACK_STL_MAP": "OFF",
  "USE_GRAPH_EXECUTOR": "ON",
  "USE_GRAPH_EXECUTOR_CUDA_GRAPH": "OFF",
  "USE_GTEST": "AUTO",
  "USE_HEXAGON": "OFF",
  "USE_HEXAGON_GTEST": "/path/to/hexagon/gtest",
  "USE_HEXAGON_RPC": "OFF",
  "USE_HEXAGON_SDK": "/path/to/sdk",
  "USE_IOS_RPC": "OFF",
  "USE_KHRONOS_SPIRV": "OFF",
  "USE_LIBBACKTRACE": "ON",
  "USE_LIBTORCH": "OFF",
  "USE_LLVM": "llvm-config-11",
  "USE_METAL": "OFF",
  "USE_MICRO": "OFF",
  "USE_MICRO_STANDALONE_RUNTIME": "OFF",
  "USE_MIOPEN": "OFF",
  "USE_MKL": "OFF",
  "USE_MSVC_MT": "OFF",
  "USE_NNPACK": "OFF",
  "USE_OPENCL": "OFF",
  "USE_OPENCL_GTEST": "/path/to/opencl/gtest",
  "USE_OPENMP": "none",
  "USE_PAPI": "OFF",
  "USE_PROFILER": "ON",
  "USE_PT_TVMDSOOP": "OFF",
  "USE_RANDOM": "ON",
  "USE_RELAY_DEBUG": "OFF",
  "USE_ROCBLAS": "OFF",
  "USE_ROCM": "OFF",
  "USE_RPC": "ON",
  "USE_RTTI": "ON",
  "USE_RUST_EXT": "OFF",
  "USE_SORT": "ON",
  "USE_SPIRV_KHR_INTEGER_DOT_PRODUCT": "OFF",
  "USE_STACKVM_RUNTIME": "OFF",
  "USE_TARGET_ONNX": "OFF",
  "USE_TENSORFLOW_PATH": "none",
  "USE_TENSORRT_CODEGEN": "OFF",
  "USE_TENSORRT_RUNTIME": "OFF",
  "USE_TFLITE": "OFF",
  "USE_TF_TVMDSOOP": "OFF",
  "USE_THREADS": "ON",
  "USE_THRUST": "OFF",
  "USE_VITIS_AI": "OFF",
  "USE_VULKAN": "OFF"
}
```
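For reference, the pattern each script adopts is small: import `describe` from
`tvm.support` and call it at the top of `main()`, before the workload is
printed, so every tuning log begins with the environment block above. A
minimal sketch of that pattern follows; the workload name is a hypothetical
stand-in for the scripts' `ARGS.workload`, not part of this patch.

```python
# Minimal sketch of the pattern this patch applies to each tuning script.
# `describe()` is the helper from tvm.support (added in #11751); the
# workload placeholder below is illustrative only.
from tvm.support import describe


def main():
    describe()  # prints TVM/Python versions, os.uname(), and CMake options
    workload = "resnet_50"  # hypothetical stand-in for ARGS.workload
    print(f"Workload: {workload}")


if __name__ == "__main__":
    main()
```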
---
 python/tvm/auto_scheduler/testing/tune_onnx.py  | 2 ++
 python/tvm/auto_scheduler/testing/tune_relay.py | 5 ++++-
 python/tvm/auto_scheduler/testing/tune_te.py    | 3 +++
 python/tvm/meta_schedule/testing/tune_onnx.py   | 2 ++
 python/tvm/meta_schedule/testing/tune_relay.py  | 4 +++-
 python/tvm/meta_schedule/testing/tune_te.py     | 3 +++
 6 files changed, 17 insertions(+), 2 deletions(-)

diff --git a/python/tvm/auto_scheduler/testing/tune_onnx.py b/python/tvm/auto_scheduler/testing/tune_onnx.py
index 2e6b9e5924e69..84ab1b48f8d25 100644
--- a/python/tvm/auto_scheduler/testing/tune_onnx.py
+++ b/python/tvm/auto_scheduler/testing/tune_onnx.py
@@ -27,6 +27,7 @@
 from tvm import relay
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.relay.frontend import from_onnx
+from tvm.support import describe
 
 
 def _parse_args():
@@ -152,6 +153,7 @@ def main():
     else:
         raise NotImplementedError(f"Unsupported target {ARGS.target}")
 
+    describe()
     print(f"Workload: {ARGS.model_name}")
     onnx_model = onnx.load(ARGS.onnx_path)
     shape_dict = {}
diff --git a/python/tvm/auto_scheduler/testing/tune_relay.py b/python/tvm/auto_scheduler/testing/tune_relay.py
index 48ed44ef19b78..2bd78139993be 100644
--- a/python/tvm/auto_scheduler/testing/tune_relay.py
+++ b/python/tvm/auto_scheduler/testing/tune_relay.py
@@ -26,6 +26,7 @@
 from tvm import relay
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.meta_schedule.testing.relay_workload import get_network
+from tvm.support import describe
 
 
 def _parse_args():
@@ -149,6 +150,9 @@ def main():
         )
     else:
         raise NotImplementedError(f"Unsupported target {ARGS.target}")
+
+    describe()
+    print(f"Workload: {ARGS.workload}")
     mod, params, (input_name, input_shape, input_dtype) = get_network(
         ARGS.workload,
         ARGS.input_shape,
@@ -156,7 +160,6 @@ def main():
     )
     input_info = {input_name: input_shape}
     input_data = {}
-    print(f"Workload: {ARGS.workload}")
     for input_name, input_shape in input_info.items():
         print(f"  input_name: {input_name}")
         print(f"  input_shape: {input_shape}")
diff --git a/python/tvm/auto_scheduler/testing/tune_te.py b/python/tvm/auto_scheduler/testing/tune_te.py
index b02a6059e23dd..2eaddbbc081e9 100644
--- a/python/tvm/auto_scheduler/testing/tune_te.py
+++ b/python/tvm/auto_scheduler/testing/tune_te.py
@@ -21,6 +21,7 @@
 import tvm
 from tvm import auto_scheduler
 from tvm.meta_schedule.testing.te_workload import CONFIGS
+from tvm.support import describe
 
 
 def _parse_args():
@@ -94,6 +95,8 @@ def _parse_args():
 
 
 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     log_file = os.path.join(ARGS.work_dir, f"{ARGS.workload}.json")
     workload_func, params = CONFIGS[ARGS.workload]
     params = params[0]  # type: ignore
diff --git a/python/tvm/meta_schedule/testing/tune_onnx.py b/python/tvm/meta_schedule/testing/tune_onnx.py
index 3a1b4cd5fe206..1a51622b5cde5 100644
--- a/python/tvm/meta_schedule/testing/tune_onnx.py
+++ b/python/tvm/meta_schedule/testing/tune_onnx.py
@@ -25,6 +25,7 @@
 from tvm import meta_schedule as ms
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.relay.frontend import from_onnx
+from tvm.support import describe
 
 
 def _parse_args():
@@ -120,6 +121,7 @@ def _parse_args():
 
 
 def main():
+    describe()
     print(f"Workload: {ARGS.model_name}")
     onnx_model = onnx.load(ARGS.onnx_path)
     shape_dict = {}
diff --git a/python/tvm/meta_schedule/testing/tune_relay.py b/python/tvm/meta_schedule/testing/tune_relay.py
index 8663eb460c4a8..6188e124fde82 100644
--- a/python/tvm/meta_schedule/testing/tune_relay.py
+++ b/python/tvm/meta_schedule/testing/tune_relay.py
@@ -24,6 +24,7 @@
 from tvm import meta_schedule as ms
 from tvm.meta_schedule.testing.custom_builder_runner import run_module_via_rpc
 from tvm.meta_schedule.testing.relay_workload import get_network
+from tvm.support import describe
 
 
 def _parse_args():
@@ -118,6 +119,8 @@ def _parse_args():
 
 
 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     mod, params, (input_name, input_shape, input_dtype) = get_network(
         ARGS.workload,
         ARGS.input_shape,
@@ -125,7 +128,6 @@ def main():
     )
     input_info = {input_name: input_shape}
     input_data = {}
-    print(f"Workload: {ARGS.workload}")
     for input_name, input_shape in input_info.items():
         print(f"  input_name: {input_name}")
         print(f"  input_shape: {input_shape}")
diff --git a/python/tvm/meta_schedule/testing/tune_te.py b/python/tvm/meta_schedule/testing/tune_te.py
index b2649564bfa98..cbc310f999ad9 100644
--- a/python/tvm/meta_schedule/testing/tune_te.py
+++ b/python/tvm/meta_schedule/testing/tune_te.py
@@ -24,6 +24,7 @@
 from tvm import meta_schedule as ms
 from tvm import tir
 from tvm.meta_schedule.testing.te_workload import create_te_workload
+from tvm.support import describe
 
 
 def _parse_args():
@@ -107,6 +108,8 @@ def _parse_args():
 
 
 def main():
+    describe()
+    print(f"Workload: {ARGS.workload}")
     runner = ms.runner.RPCRunner(
         rpc_config=ARGS.rpc_config,
         evaluator_config=ms.runner.EvaluatorConfig(
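After applying this patch on top of #11751, the same environment block can
also be reproduced outside the tuning scripts. A minimal standalone check,
assuming a TVM build that already ships `tvm.support.describe`:

```python
# Standalone check: print the "Python Environment" / "CMake Options" block
# shown in the commit message (requires a TVM build with tvm.support.describe).
from tvm.support import describe

describe()
```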