Commit

remove use_dynamic_shared_memory from FunctionInfo meta data
masa committed Jul 20, 2021
1 parent 06187d6 commit 4450e02
Showing 7 changed files with 33 additions and 31 deletions.
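
In short: the boolean use_dyn_shared_memory field is dropped from FunctionInfo. Whether a kernel uses dynamically sized shared memory is now carried inside launch_param_tags as the tag tir.use_dyn_shared_memory (kUseDynamicSharedMemoryTag): ExtractFuncInfo appends the tag when the kDeviceUseDynSharedMemory function attribute is set, LaunchParamConfig::Init recognizes it when setting up a kernel launch, and FunctionInfo::Load additionally accepts the legacy thread_axis_tags key for backward compatibility.
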
8 changes: 3 additions & 5 deletions src/runtime/cuda/cuda_module.cc
@@ -153,13 +153,12 @@ class CUDAWrappedFunc {
public:
// initialize the CUDA function.
void Init(CUDAModuleNode* m, ObjectPtr<Object> sptr, const std::string& func_name,
- size_t num_void_args, const std::vector<std::string>& launch_param_tags,
- bool use_dyn_shared_memory) {
+ size_t num_void_args, const std::vector<std::string>& launch_param_tags) {
m_ = m;
sptr_ = sptr;
func_name_ = func_name;
std::fill(fcache_.begin(), fcache_.end(), nullptr);
- launch_param_config_.Init(num_void_args, launch_param_tags, use_dyn_shared_memory);
+ launch_param_config_.Init(num_void_args, launch_param_tags);
}
// invoke the function with void arguments
void operator()(TVMArgs args, TVMRetValue* rv, void** void_args) const {
@@ -242,8 +241,7 @@ PackedFunc CUDAModuleNode::GetFunction(const std::string& name,
if (it == fmap_.end()) return PackedFunc();
const FunctionInfo& info = it->second;
CUDAWrappedFunc f;
- f.Init(this, sptr_to_self, name, info.arg_types.size(), info.launch_param_tags,
- info.use_dyn_shared_memory);
+ f.Init(this, sptr_to_self, name, info.arg_types.size(), info.launch_param_tags);
return PackFuncVoidAddr(f, info.arg_types);
}

8 changes: 3 additions & 5 deletions src/runtime/file_utils.cc
@@ -44,7 +44,6 @@ void FunctionInfo::Save(dmlc::JSONWriter* writer) const {
writer->WriteObjectKeyValue("name", name);
writer->WriteObjectKeyValue("arg_types", sarg_types);
writer->WriteObjectKeyValue("launch_param_tags", launch_param_tags);
- writer->WriteObjectKeyValue("use_dyn_shared_memory", use_dyn_shared_memory);
writer->EndObject();
}

@@ -53,8 +52,9 @@ void FunctionInfo::Load(dmlc::JSONReader* reader) {
std::vector<std::string> sarg_types;
helper.DeclareField("name", &name);
helper.DeclareField("arg_types", &sarg_types);
- helper.DeclareField("launch_param_tags", &launch_param_tags);
- helper.DeclareOptionalField("use_dyn_shared_memory", &use_dyn_shared_memory);
+ helper.DeclareOptionalField("launch_param_tags", &launch_param_tags);
+ helper.DeclareOptionalField("thread_axis_tags",
+                             &launch_param_tags);  // for backward compatibility
helper.ReadAllFields(reader);
arg_types.resize(sarg_types.size());
for (size_t i = 0; i < arg_types.size(); ++i) {
@@ -66,14 +66,12 @@ void FunctionInfo::Save(dmlc::Stream* writer) const {
writer->Write(name);
writer->Write(arg_types);
writer->Write(launch_param_tags);
- writer->Write(use_dyn_shared_memory);
}

bool FunctionInfo::Load(dmlc::Stream* reader) {
if (!reader->Read(&name)) return false;
if (!reader->Read(&arg_types)) return false;
if (!reader->Read(&launch_param_tags)) return false;
- if (!reader->Read(&use_dyn_shared_memory)) return false;
return true;
}

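
To see what the serialized metadata looks like after this change, here is a minimal round-trip sketch. It is not part of the commit: it assumes dmlc-core's JSON headers and the runtime's meta_data.h are on the include path, and the kernel name is made up for illustration.

#include <sstream>
#include <dmlc/json.h>
#include "meta_data.h"

int main() {
  tvm::runtime::FunctionInfo info;
  info.name = "my_kernel";  // hypothetical kernel name
  info.launch_param_tags = {"blockIdx.x", "threadIdx.x",
                            tvm::runtime::kUseDynamicSharedMemoryTag};

  // Save: the JSON object now has only "name", "arg_types" and
  // "launch_param_tags"; the old "use_dyn_shared_memory" key is gone and
  // "tir.use_dyn_shared_memory" rides along as the last launch param tag.
  std::ostringstream os;
  dmlc::JSONWriter writer(&os);
  info.Save(&writer);

  // Load: the flag round-trips through the same launch_param_tags vector.
  std::istringstream is(os.str());
  dmlc::JSONReader reader(&is);
  tvm::runtime::FunctionInfo restored;
  restored.Load(&reader);
  return 0;
}
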
4 changes: 3 additions & 1 deletion src/runtime/meta_data.h
@@ -99,12 +99,14 @@ Module MetadataModuleCreate(
const std::unordered_map<std::string, NDArray>& metadata,
const std::unordered_map<std::string, std::vector<std::string>>& sym_vars);

+ /*! \brief A tag to specify whether or not dynamic shared memory is used */
+ constexpr const char* kUseDynamicSharedMemoryTag = "tir.use_dyn_shared_memory";

/*! \brief function information needed by device */
struct FunctionInfo {
std::string name;
std::vector<DLDataType> arg_types;
std::vector<std::string> launch_param_tags;
- bool use_dyn_shared_memory{false};

void Save(dmlc::JSONWriter* writer) const;
void Load(dmlc::JSONReader* reader);
8 changes: 3 additions & 5 deletions src/runtime/rocm/rocm_module.cc
@@ -147,13 +147,12 @@ class ROCMWrappedFunc {
public:
// initialize the ROCM function.
void Init(ROCMModuleNode* m, ObjectPtr<Object> sptr, const std::string& func_name,
- size_t num_void_args, const std::vector<std::string>& launch_param_tags,
- bool use_dyn_shared_memory) {
+ size_t num_void_args, const std::vector<std::string>& launch_param_tags) {
m_ = m;
sptr_ = sptr;
func_name_ = func_name;
std::fill(fcache_.begin(), fcache_.end(), nullptr);
- launch_param_config_.Init(num_void_args, launch_param_tags, use_dyn_shared_memory);
+ launch_param_config_.Init(num_void_args, launch_param_tags);
}
// invoke the function with void arguments
void operator()(TVMArgs args, TVMRetValue* rv, void* packed_args, size_t packed_nbytes) const {
@@ -197,8 +196,7 @@ PackedFunc ROCMModuleNode::GetFunction(const std::string& name,
if (it == fmap_.end()) return PackedFunc();
const FunctionInfo& info = it->second;
ROCMWrappedFunc f;
- f.Init(this, sptr_to_self, name, info.arg_types.size(), info.launch_param_tags,
- info.use_dyn_shared_memory);
+ f.Init(this, sptr_to_self, name, info.arg_types.size(), info.launch_param_tags);
return PackFuncPackedArg(f, info.arg_types);
}

10 changes: 7 additions & 3 deletions src/runtime/thread_storage_scope.h
@@ -29,6 +29,8 @@
#include <string>
#include <vector>

+ #include "meta_data.h"

namespace tvm {
namespace runtime {

@@ -198,13 +200,15 @@ struct ThreadWorkLoad {
/*! \brief Thread axis configuration */
class LaunchParamConfig {
public:
- void Init(size_t base, const std::vector<std::string>& launch_param_tags,
- bool use_dyn_shared_memory = false) {
+ void Init(size_t base, const std::vector<std::string>& launch_param_tags) {
base_ = base;
- use_dyn_shared_memory_ = use_dyn_shared_memory;
std::vector<bool> filled(6, false);
for (size_t i = 0; i < launch_param_tags.size(); ++i) {
const std::string& tag = launch_param_tags[i];
+ if (tag == kUseDynamicSharedMemoryTag) {
+   use_dyn_shared_memory_ = true;
+   continue;
+ }
ThreadScope ts = ThreadScope::Create(tag);
arg_index_map_.push_back(ts.rank * 3 + ts.dim_index);
filled[ts.rank * 3 + ts.dim_index] = true;
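
For context, a hedged caller-side sketch of the new launch-configuration setup: the dynamic-shared-memory marker travels through the same tag list as the thread-axis tags. The call below is illustrative only; in the runtime it happens inside CUDAWrappedFunc::Init and ROCMWrappedFunc::Init with the tags taken from FunctionInfo::launch_param_tags, and it assumes the runtime headers are on the include path.

#include <string>
#include <vector>
#include "thread_storage_scope.h"

void ConfigureLaunchSketch() {
  tvm::runtime::LaunchParamConfig config;
  // Three void arguments, two thread axes, plus the marker tag. Init() sets
  // its internal use_dyn_shared_memory_ flag when it sees the tag and skips
  // it when building the thread-axis index mapping.
  config.Init(/*base=*/3, {"blockIdx.x", "threadIdx.x",
                           tvm::runtime::kUseDynamicSharedMemoryTag});
}
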
4 changes: 3 additions & 1 deletion src/target/build_common.h
@@ -57,7 +57,9 @@ inline std::unordered_map<std::string, runtime::FunctionInfo> ExtractFuncInfo(co
}
}
if (auto opt = f->GetAttr<Integer>(tir::attr::kDeviceUseDynSharedMemory)) {
- info.use_dyn_shared_memory = opt.value();
+ if (opt.value()) {
+   info.launch_param_tags.push_back(runtime::kUseDynamicSharedMemoryTag);
+ }
}
auto global_symbol = f->GetAttr<String>(tvm::attr::kGlobalSymbol);
fmap[static_cast<std::string>(global_symbol.value())] = info;
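
On the producer side, the relevant logic after this change reads as follows. This is reconstructed from the hunk above with the surrounding ExtractFuncInfo loop elided; the comments are added here for explanation and are not in the commit.

// Inside ExtractFuncInfo, for each function f in the module:
if (auto opt = f->GetAttr<Integer>(tir::attr::kDeviceUseDynSharedMemory)) {
  if (opt.value()) {
    // Instead of setting a dedicated FunctionInfo field, record dynamic
    // shared memory usage as one more launch parameter tag.
    info.launch_param_tags.push_back(runtime::kUseDynamicSharedMemoryTag);
  }
}
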
22 changes: 11 additions & 11 deletions tests/python/unittest/test_tir_ir_builder.py
@@ -538,8 +538,8 @@ def test_device_ir(A, B):
s = te.create_schedule(B.op)

def check_target(target):
- if not tvm.testing.device_enabled(target):
-     return
+ # if not tvm.testing.device_enabled(target):
+ #     return

freduce = tvm.build(s, [A, B], target)
dev = tvm.device(target, 0)
@@ -555,13 +555,13 @@ def check_target(target):


if __name__ == "__main__":
- test_prefetch()
- test_if()
- test_for()
- test_cpu()
- test_gpu()
- test_while_vectorize()
- test_while_collatz()
- test_while_mandel()
- test_while_binary_search()
+ # test_prefetch()
+ # test_if()
+ # test_for()
+ # test_cpu()
+ # test_gpu()
+ # test_while_vectorize()
+ # test_while_collatz()
+ # test_while_mandel()
+ # test_while_binary_search()
test_dyn_shared()
