This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Dynamic subgraph compile support #17623

Merged: 56 commits, Mar 19, 2020
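In summary (per the commit history and diffs below): this PR passes a model's input arguments (args, and in later commits aux) down to the custom partitioner's acceptSubgraph callback (referred to as reviewSubgraph in the README commits), so an external library can inspect parameter names, shapes, dtypes, and values when deciding whether to accept a candidate subgraph. The library-facing callback signature, taken from the lib_api.h diff below, becomes:

MXReturnValue myAcceptSubgraph(std::string json, int subgraph_id, bool* accept,
                               std::unordered_map<std::string, std::string>& options,
                               std::unordered_map<std::string, std::string>& attrs,
                               std::map<std::string, MXTensor>& args);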
Commits (56), all authored by samskalicky:
639db17 (Feb 17, 2020): passed args down to acceptSubgraph
5898d53 (Feb 18, 2020): added example and set param names on inputs to subgraph to map
2294584 (Feb 19, 2020): increased lib_api version number
fad8e74 (Feb 19, 2020): fixed whitespace
734f1c4 (Feb 19, 2020): fixed spacing
934ae8f (Feb 20, 2020): Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
ceed9be (Feb 20, 2020): added info about lib_api.h to README
098db85 (Feb 20, 2020): updated readme for new args argument to reviewSubgraph
cfcc0a6 (Feb 20, 2020): added more tests
1fa7f1d (Feb 21, 2020): added example for partitioning HybridBlock in-place without forward pass
8f37c48 (Feb 21, 2020): added example for partitioning
729173f (Feb 21, 2020): fixed whitespace
bb90d70 (Feb 21, 2020): fixed sanity
06c3841 (Feb 21, 2020): fixed lint
f8f6191 (Feb 22, 2020): added support for passing aux
dc17e3f (Feb 22, 2020): fixed lint
56bbb01 (Feb 22, 2020): sanity
a12517d (Feb 22, 2020): perl changes
c5d322e (Feb 24, 2020): replaced code with hybridize call
4333260 (Feb 24, 2020): added unittest for gluon optimize_for
adde456 (Feb 24, 2020): fixed whitespace
8f58f33 (Feb 24, 2020): fixed test
4daefa7 (Feb 26, 2020): addressed comments
68f3de0 (Feb 26, 2020): fixed grammar
3e4b09a (Feb 26, 2020): Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
520edcc (Feb 26, 2020): fixed spelling
55d575b (Feb 26, 2020): added aux argument to the reviewSubgraph API in README
005b53c (Feb 27, 2020): updated infer shape to use aux for optimize_for
a51486c (Feb 27, 2020): Merge branch 'subgraph_compile' of https://github.com/samskalicky/inc…
668e315 (Feb 27, 2020): fixed spacing
bb7e52d (Feb 27, 2020): changed shape/dtype keys so they dont conflict with MXNet operator attrs
20382ae (Feb 27, 2020): added error message to show missing arg/aux
a2a9df1 (Feb 27, 2020): added calls to setDLtensor for MXTensor constructors
23958da (Feb 27, 2020): changed tests to pass aux in addition to args
4f1d0d2 (Feb 29, 2020): fixed bug passing attributes
127bcbf (Feb 29, 2020): fixed memory leak where user attribute strings were not freed
3f57b9b (Feb 29, 2020): added passing down shapes/dtypes to subgraph inputs
c971fdc (Feb 29, 2020): fixed style
2d9995a (Mar 5, 2020): fixed docstring
ade7d48 (Mar 5, 2020): removed space
736cd8f (Mar 6, 2020): changed defines
7fb9ea3 (Mar 13, 2020): fixed bug in indexing into map with shapes/types when annotating the …
b0a79e5 (Mar 13, 2020): added support for MKLDNN tensor format conversion in case user does p…
488740a (Mar 13, 2020): cleaned up code and added comments
3b278be (Mar 13, 2020): fixed whitespace
26734fe (Mar 13, 2020): added guards around MKLDNN checks for non-MKLDNN builds
277288d (Mar 14, 2020): refactor to use pointers to reduce code duplication
5940450 (Mar 14, 2020): added MKLDNN guards for custom op
c1d3f5e (Mar 14, 2020): fixed whitespace
0b38e5c (Mar 16, 2020): added subgraph property API to let subg_prop initialize subgraph inputs
d59a4dc (Mar 16, 2020): moved custom code to subgraph property API, cleaned up build_subgraph.cc
5abc8c3 (Mar 16, 2020): added support for ops with multiple outputs and InitSubgraphInputs
90f6973 (Mar 16, 2020): fixed sanity, removed prints
28b6bef (Mar 16, 2020): fixed whitespace
516d149 (Mar 17, 2020): fixed shape/dtype parsing
4e2efec (Mar 17, 2020): fixed lint
Files changed (7):
example/extensions/lib_subgraph/subgraph_lib.cc (18 additions, 2 deletions)

@@ -206,7 +206,7 @@ MXReturnValue mySupportedOps(std::string json,
   }

   //check if op dtype is float
-  if(dtype == kFloat32) {
+  if((dtype == kFloat32 && options.count("reqFloat") > 0) || options.count("reqFloat") == 0) {
     //check if op is in whitelist
     if(std::find(op_names.begin(),op_names.end(),op.str.c_str()) != op_names.end()) {
       // found op in whitelist, set value to 1 to include op in subgraph
@@ -219,10 +219,26 @@

 MXReturnValue myAcceptSubgraph(std::string json, int subraph_id, bool* accept,
                                std::unordered_map<std::string, std::string>& options,
-                               std::unordered_map<std::string, std::string>& attrs) {
+                               std::unordered_map<std::string, std::string>& attrs,
+                               std::map<std::string, MXTensor>& args) {
   for (auto kv : options) {
     std::cout << "option: " << kv.first << " ==> " << kv.second << std::endl;
   }
+  for (auto kv : args) {
+    std::cout << "arg: " << kv.first << " ==> (";
+    for (auto s : kv.second.shape)
+      std::cout << s << ",";
+    std::cout << ") [";
+    for (int i=0; i<kv.second.size(); i++)
+      std::cout << kv.second.data<float>()[i] << ", ";
+    std::cout << "]" << std::endl;
+  }
+  if(options.count("reqArgs") > 0 && args.size() == 0) {
+    *accept = false;
+    std::cout << "rejecting subgraph since args were not provided" << std::endl;
+    return MX_SUCCESS;
+  }
+
   if(options.find("reject") != options.end() &&
      options["reject"].compare("True") == 0) {
     *accept = false;
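The options map that mySupportedOps and myAcceptSubgraph receive is populated from the keyword arguments passed to optimize_for on the Python side. A minimal sketch of that flow, assuming a library built from this file and loaded as in the test script below (the symbol and option values here are illustrative):

import os
import mxnet as mx

# built from subgraph_lib.cc; .dll on Windows, as in the test below
mx.library.load(os.path.abspath('libsubgraph_lib.so'))

a = mx.sym.var('a')
sym = mx.sym.log(mx.sym.exp(a))
# each kwarg arrives in the callbacks' options map as strings,
# e.g. options["reject"] == "True" triggers the rejection branch above
mysym = sym.optimize_for("myProp", reject=True)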
example/extensions/lib_subgraph/test_subgraph.py (30 additions, 0 deletions)

@@ -35,12 +35,17 @@
 path = os.path.abspath('libsubgraph_lib.dll')
 mx.library.load(path)

+# example model, ops to be partitioned do not have args (use outputs from other ops as inputs)
 a = mx.sym.var('a')
 b = mx.sym.var('b')
 c = a + b
 d = mx.sym.exp(c)
 sym = mx.sym.log(d)

+# example model, ops to be partitioned have args
+d2 = mx.sym.exp(a)
+sym2 = mx.sym.log(d2)
+
 #execute in MXNet
 print('-------------------------------')
 print('Testing regular MXNet execution')
@@ -74,3 +79,28 @@
 exe3 = mysym3.bind(ctx=mx.cpu(), args={'a':mx.nd.ones((3,2)), 'b':mx.nd.ones((3,2))})
 out3 = exe3.forward()
 print(out3)
+
+#execute in MXNet
+print('-------------------------------')
+print('Testing regular MXNet execution')
+exe4 = sym2.bind(ctx=mx.cpu(), args={'a':mx.nd.ones((3,2))})
+out4 = exe4.forward()
+print(out4)
+
+# with propogating shapes/types
+print('-------------------------------')
+print('Testing partitioning with shapes/types')
+arg_array = [mx.nd.ones((3,2),dtype='float32')]
+mysym5 = sym2.optimize_for("myProp", arg_array, reqArgs=True)
+print(mysym5.tojson())
+exe5 = mysym5.bind(ctx=mx.cpu(), args={'a':mx.nd.ones((3,2))})
+out5 = exe5.forward()
+print(out5)
+
+# without propogating shapes/types
+print('-------------------------------')
+print('Testing partitioning without shapes/types')
+mysym6 = sym2.optimize_for("myProp", reqArgs=True)
+exe6 = mysym6.bind(ctx=mx.cpu(), args={'a':mx.nd.ones((3,2))})
+out6 = exe6.forward()
+print(out6)
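The commit log also covers partitioning a Gluon HybridBlock in place ("replaced code with hybridize call", "added unittest for gluon optimize_for"). A minimal sketch of that flow, assuming the hybridize backend hook those commits refer to; the SymbolBlock construction here is illustrative:

block = mx.gluon.SymbolBlock(sym2, [mx.sym.var('a')])
block.initialize()
block.hybridize(backend='myProp')   # request partitioning with the custom property
out = block(mx.nd.ones((3,2)))      # first forward pass triggers the partitioning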
include/mxnet/lib_api.h (30 additions, 5 deletions)

@@ -39,7 +39,7 @@
 #include <utility>
 #include <stdexcept>

-#define MX_LIBRARY_VERSION 3
+#define MX_LIBRARY_VERSION 4

 /*!
  * \brief For loading multiple custom op libraries in Linux, exporting same symbol multiple
@@ -717,7 +717,8 @@ typedef MXReturnValue (*supportedOps_t)(std::string, int, int*,
                                         std::unordered_map<std::string, std::string>&);
 typedef MXReturnValue (*acceptSubgraph_t)(std::string, int, bool*,
                                           std::unordered_map<std::string, std::string>&,
-                                          std::unordered_map<std::string, std::string>&);
+                                          std::unordered_map<std::string, std::string>&,
+                                          std::map<std::string, MXTensor>&);

 /*!
  * \brief An abstract class for subgraph property
@@ -920,7 +921,12 @@ typedef int (*partCallSupportedOps_t)(supportedOps_t supportedOps, const char *json,
 typedef int (*partCallAcceptSubgraph_t)(acceptSubgraph_t acceptSubgraph, const char *json,
                                         int subgraph_id, int *accept, const char* const* opt_keys,
                                         const char* const* opt_vals, int num_opts,
-                                        char*** attr_keys, char*** attr_vals, int *num_attrs);
+                                        char*** attr_keys, char*** attr_vals, int *num_attrs,
+                                        const char* const* arg_names, int num_args,
+                                        void* const* arg_data, const int64_t* const* arg_shapes,
+                                        const int* arg_dims, const int* arg_types,
+                                        const size_t* arg_IDs, const char* const* arg_dev_type,
+                                        const int* arg_dev_id);

 #define MXLIB_INITIALIZE_STR "initialize"
 typedef int (*initialize_t)(int version);
@@ -1283,7 +1289,12 @@ extern "C" {
   _partCallAcceptSubgraph(acceptSubgraph_t acceptSubgraph, const char *json,
                           int subgraph_id, int *accept, const char* const* opt_keys,
                           const char* const* opt_vals, int num_opts,
-                          char*** attr_keys, char*** attr_vals, int *num_attrs) {
+                          char*** attr_keys, char*** attr_vals, int *num_attrs,
+                          const char* const* arg_names, int num_args,
+                          void* const* arg_data, const int64_t* const* arg_shapes,
+                          const int* arg_dims, const int* arg_types,
+                          const size_t* arg_IDs, const char* const* arg_dev_type,
+                          const int* arg_dev_id) {
     std::string subgraph_json(json);
     bool accept_bool = false;
     // create map of attributes from list
@@ -1292,10 +1303,24 @@ extern "C" {
       opts[std::string(opt_keys[i])] = std::string(opt_vals[i]);
     }

+    // create a map of named tensors for args
+    std::map<std::string, MXTensor> args;
+    for (int i = 0; i < num_args; i++) {
+      std::vector<int64_t> shapes;
+      for (int j = 0; j < arg_dims[i]; j++)
+        shapes.push_back(arg_shapes[i][j]);
+
+      MXTensor tensor(arg_data[i], shapes, (MXDType)arg_types[i],
+                      arg_IDs[i], {arg_dev_type[i], arg_dev_id[i]});
+      args[arg_names[i]] = tensor;
+    }
+
+
     // attributes to set on subgraph node
     std::unordered_map<std::string, std::string> attrs;

-    MXReturnValue retval = acceptSubgraph(subgraph_json, subgraph_id, &accept_bool, opts, attrs);
+    MXReturnValue retval = acceptSubgraph(subgraph_json, subgraph_id, &accept_bool,
+                                          opts, attrs, args);
     *accept = accept_bool;

     if (attrs.size() > 0) {
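With MX_LIBRARY_VERSION 4, a library's acceptSubgraph implementation can key its decision off the tensors it now receives. A minimal sketch (the parameter name "myweight" and the attribute key are illustrative; the MXTensor fields follow the definitions in this header):

MXReturnValue gateOnDtype(std::string json, int subgraph_id, bool* accept,
                          std::unordered_map<std::string, std::string>& options,
                          std::unordered_map<std::string, std::string>& attrs,
                          std::map<std::string, MXTensor>& args) {
  *accept = true;
  auto it = args.find("myweight");            // illustrative parameter name
  if (it != args.end() && it->second.dtype != kFloat32)
    *accept = false;                          // only accept float32 params
  attrs["partitioned_by"] = "gateOnDtype";    // stored on the subgraph node
  return MX_SUCCESS;
}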
src/c_api/c_api.cc (4 additions, 5 deletions)

@@ -821,11 +821,10 @@ int MXLoadLib(const char *path) {
           LOG(INFO) << "\t\tStrategy[" << j << "] " << strategy_str
                     << " subgraphOp: '" << op_name_str << "'";

-          // MXNET_REGISTER_SUBGRAPH_PROPERTY(customBackend, CustomSubgraphProperty);
-          mxnet::op::SubgraphBackendRegistry::Get()->__REGISTER_CUSTOM_PROPERTY__(name_str,
-                              std::make_shared<mxnet::op::CustomSubgraphProperty>(
-                                  strategy_str, callSupportedOps, supportedOps_fp,
-                                  callAcceptSubgraph, acceptSubgraph_fp, callFree, op_name_str));
+          mxnet::op::SubgraphBackendRegistry::Get()->__REGISTER_CUSTOM_PROPERTY__
+            (name_str, std::make_shared<mxnet::op::CustomSubgraphProperty>
+             (strategy_str, callSupportedOps, supportedOps_fp,
+              callAcceptSubgraph, acceptSubgraph_fp, callFree, op_name_str));
         }
       }
       API_END();
src/c_api/c_api_symbolic.cc (8 additions, 0 deletions)

@@ -1383,6 +1383,14 @@ int MXOptimizeForBackend(SymbolHandle sym_handle,
       common::HandleInferStorageTypeError(num_forward_inputs, indexed_graph,
                                           g.GetAttr<StorageTypeVector>("storage_type"));
     }
+    std::vector<std::string> arg_names = sym->ListInputNames(nnvm::Symbol::kReadOnlyArgs);
+    g.attrs["in_args"] = std::make_shared<nnvm::any>(in_args_ptr);
+    g.attrs["in_arg_names"] = std::make_shared<nnvm::any>(arg_names);
+  } else {
+    NDArray **in_args_ptr = static_cast<NDArray**>(nullptr);
+    std::vector<std::string> arg_names;
+    g.attrs["in_args"] = std::make_shared<nnvm::any>(in_args_ptr);
+    g.attrs["in_arg_names"] = std::make_shared<nnvm::any>(arg_names);
   }
   std::vector<std::pair<std::string, std::string>> options_map;
   for (mx_uint i = 0; i < num_options; ++i) {
src/operator/subgraph/build_subgraph.cc (4 additions, 2 deletions)

@@ -561,10 +561,12 @@ void CutGraphInputs(const std::vector<nnvm::NodeEntry*> &input_entries,
     nnvm::ObjectPtr n = nnvm::CreateVariableNode(
         var_name + std::to_string(name_count_map[var_name]));
     // set attribute for subgraph input to indicate if it is from an arg/param to model
-    if (e->node->is_variable())
+    if (e->node->is_variable()) {
       n->attrs.dict["isArg"] = "True";
-    else
+      n->attrs.dict["argName"] = var_name;
+    } else {
       n->attrs.dict["isArg"] = "False";
+    }
     *e = nnvm::NodeEntry{n, 0, 0};
   }
 }
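With the isArg/argName attributes set in CutGraphInputs, downstream code can map a subgraph input back to the model parameter it came from. A hedged sketch of reading them (the node list and surrounding context are illustrative, not part of this diff):

// somewhere a subgraph property can see the new subgraph input variable nodes
for (const nnvm::ObjectPtr& input : subgraph_inputs) {   // illustrative list
  if (input->attrs.dict["isArg"] == "True") {
    // "argName" recovers the original parameter name, e.g. for args-map lookups
    const std::string& param_name = input->attrs.dict["argName"];
  }
}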
src/operator/subgraph/partitioner/custom_subgraph_property.h (44 additions, 4 deletions)

@@ -99,6 +99,31 @@ class CustomSubgraphProperty: public SubgraphProperty {
                 const std::vector<std::pair<std::string, std::string>>& options_map) {
     // clear supported_nodes to remove state from previous calls
     supported_nodes.clear();
+    // get input args and arg names
+    in_arg_names = g.GetAttr<std::vector<std::string>>("in_arg_names");
+    in_args_ptr = g.GetAttr<NDArray**>("in_args");
+
+    // convert input args
+    arg_names.clear();
+    arg_data.clear();
+    arg_shapes.clear();
+    arg_dims.clear();
+    arg_types.clear();
+    arg_verIDs.clear();
+    arg_dev_type.clear();
+    arg_dev_id.clear();
+    for (size_t i=0; i < in_arg_names.size(); i++) {
+      arg_names.push_back(in_arg_names[i].c_str());
+      const auto &in_arg = *(in_args_ptr[i]);
+      arg_data.push_back(in_arg.data().dptr_);
+      arg_shapes.push_back(in_arg.shape().data());
+      arg_dims.push_back(in_arg.shape().ndim());
+      arg_types.push_back(in_arg.dtype());
+      arg_verIDs.push_back(in_arg.version());
+      const char* ctx_str = in_arg.ctx().dev_mask() == Context::kCPU ? "cpu" : "gpu";
+      arg_dev_type.push_back(ctx_str);
+      arg_dev_id.push_back(in_arg.ctx().real_dev_id());
+    }

     // remove all graph attrs, some cannot be saved to json
     nnvm::Graph graph = std::move(g);
@@ -162,7 +187,7 @@ class CustomSubgraphProperty: public SubgraphProperty {
   }
   // override CreateSubgraphNode
   virtual nnvm::ObjectPtr CreateSubgraphNode(const nnvm::Symbol &sym,
-                                               const int subgraph_id = 0) const {
+                                             const int subgraph_id = 0) const {
     int accept = 1;
     int num_attr = 0;
     char** attr_keys = nullptr;
@@ -189,9 +214,14 @@

     std::string subgraph_json = nnvm::pass::SaveJSON(g);
     CHECK(call_accept_subgraph_(accept_subgraph_, subgraph_json.c_str(),
-                                 subgraph_id, &accept, opt_keys_.data(),
-                                 opt_vals_.data(), opt_keys_.size(),
-                                 &attr_keys, &attr_vals, &num_attr))
+                                subgraph_id, &accept, opt_keys_.data(),
+                                opt_vals_.data(), opt_keys_.size(),
+                                &attr_keys, &attr_vals, &num_attr,
+                                arg_names.data(), arg_names.size(),
+                                arg_data.data(), arg_shapes.data(),
+                                arg_dims.data(), arg_types.data(),
+                                arg_verIDs.data(), arg_dev_type.data(),
+                                arg_dev_id.data()))
       << "Error calling accept_subgraph for '" << subgraph_prop << "'";
   }
   if (accept) {
@@ -228,6 +258,16 @@ class CustomSubgraphProperty: public SubgraphProperty {
   std::string subgraph_op_name;
   std::vector<std::pair<std::string, std::string>> options_map_;
   std::vector<const char*> opt_keys_, opt_vals_;
+  std::vector<std::string> in_arg_names;
+  NDArray **in_args_ptr;
+  std::vector<const char*> arg_names;
+  std::vector<void*> arg_data;
+  std::vector<const int64_t*> arg_shapes;
+  std::vector<int> arg_dims;
+  std::vector<int> arg_types;
+  std::vector<size_t> arg_verIDs;
+  std::vector<const char*> arg_dev_type;
+  std::vector<int> arg_dev_id;
 };
 }  // namespace op
 }  // namespace mxnet