Skip to content

Commit

Permalink
Update ONNX graphsurgeon to v0.3.11
Browse files Browse the repository at this point in the history
Signed-off-by: Rajeev Rao <rajeevrao@nvidia.com>
  • Loading branch information
rajeevsrao committed Aug 5, 2021
1 parent b5ff243 commit b06a301
Show file tree
Hide file tree
Showing 24 changed files with 503 additions and 391 deletions.
10 changes: 9 additions & 1 deletion tools/onnx-graphsurgeon/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,17 @@
Dates are in YYYY-MM-DD format.


## v0.3.11 (2021-07-14)
### Changed
- Updated `fold_constants()` so that it no longer fails if a shape folding pass fails when `error_ok` is `True`.

### Fixed
- Fixed a bug where `fold_constants()` would fail if a model contained a `Slice` node without a `starts` or `ends` input.


## v0.3.10 (2021-05-20)
### Added
- Added support for folding `Shape -> Slice` patterns even when the entire shape may not be known.
- Added support for folding `Shape -> Slice` patterns even when the entire shape may not be known.


## v0.3.9 (2021-04-20)
Expand Down
41 changes: 20 additions & 21 deletions tools/onnx-graphsurgeon/docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,17 @@
#
import sys
import os

ROOT_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.pardir)
sys.path.insert(0, ROOT_DIR)
import onnx_graphsurgeon as gs

extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.autosummary',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.autosummary",
"sphinx.ext.napoleon",
"sphinx.ext.mathjax",
]

# Want to be able to generate docs with no dependencies installed
Expand All @@ -42,50 +43,48 @@

autosummary_generate = True

source_suffix = ['.rst']
source_suffix = [".rst"]

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = 'ONNX GraphSurgeon'
copyright = '2020, NVIDIA'
author = 'NVIDIA'
project = "ONNX GraphSurgeon"
copyright = "2020, NVIDIA"
author = "NVIDIA"

version = gs.__version__
# The full version, including alpha/beta/rc tags.
release = version

# Style
pygments_style = 'colorful'
pygments_style = "colorful"

html_theme = 'sphinx_rtd_theme'
html_theme = "sphinx_rtd_theme"

# Use the TRT theme and NVIDIA logo
html_static_path = ['_static']
html_static_path = ["_static"]

html_logo = '_static/img/nvlogo_white.png'
html_logo = "_static/img/nvlogo_white.png"

# Hide source link
html_show_sourcelink = False

# Output file base name for HTML help builder.
htmlhelp_basename = 'OnnxGraphSurgeonDoc'
htmlhelp_basename = "OnnxGraphSurgeonDoc"

# Template files to extend default Sphinx templates.
# See https://www.sphinx-doc.org/en/master/templating.html for details.
templates_path = ["_templates"]

# For constructor arguments to show up in Sphinx generated doc
autoclass_content = 'both'
autoclass_content = "both"

# Unlimited depth sidebar.
html_theme_options = {
'navigation_depth': -1
}
html_theme_options = {"navigation_depth": -1}

html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'] }
html_sidebars = {"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]}

# Allows us to override the default page width in the Sphinx theme.
def setup(app):
app.add_css_file('style.css')
app.add_css_file("style.css")
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,9 @@ def mul(self, a, b):
@gs.Graph.register()
def gemm(self, a, b, trans_a=False, trans_b=False):
attrs = {"transA": int(trans_a), "transB": int(trans_b)}
return propagate_dtype(self.layer(op="Gemm", inputs=[a, b], outputs=["gemm_out_gs"], attrs=attrs), a.dtype or b.dtype)
return propagate_dtype(
self.layer(op="Gemm", inputs=[a, b], outputs=["gemm_out_gs"], attrs=attrs), a.dtype or b.dtype
)


# You can also specify a set of opsets when registering a function.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,12 @@
def min(self, *args):
return self.layer(op="Min", inputs=args, outputs=["min_out"])[0]


@gs.Graph.register()
def max(self, *args):
return self.layer(op="Max", inputs=args, outputs=["max_out"])[0]


@gs.Graph.register()
def identity(self, inp):
return self.layer(op="Identity", inputs=[inp], outputs=["identity_out"])[0]
Expand All @@ -44,7 +46,9 @@ def identity(self, inp):
# Add identity nodes to make the graph structure a bit more interesting
inp = graph.identity(graph.inputs[0])
max_out = graph.max(graph.min(inp, MAX_VAL), MIN_VAL)
graph.outputs = [graph.identity(max_out), ]
graph.outputs = [
graph.identity(max_out),
]

# Graph outputs must include dtype information
graph.outputs[0].to_variable(dtype=np.float32, shape=(4, 4))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,9 @@ def shape(self, a):

@gs.Graph.register()
def reduce_prod(self, a, axes, keepdims=True):
return self.layer(op="ReduceProd", inputs=[a], attrs={"axes": axes, "keepdims": int(keepdims)}, outputs=["reduce_prod_out_gs"])[0]
return self.layer(
op="ReduceProd", inputs=[a], attrs={"axes": axes, "keepdims": int(keepdims)}, outputs=["reduce_prod_out_gs"]
)[0]


@gs.Graph.register()
Expand Down Expand Up @@ -69,8 +71,8 @@ def concat(self, inputs, axis=0):
partially_flattened = graph.reshape(graph.inputs[0], new_shape)

# Finally, set up the outputs and export.
flattened.name = "flattened" # Rename output tensor to make it easy to find.
flattened.dtype = np.float32 # NOTE: We must include dtype information for graph outputs
flattened.name = "flattened" # Rename output tensor to make it easy to find.
flattened.dtype = np.float32 # NOTE: We must include dtype information for graph outputs
partially_flattened.name = "partially_flattened"
partially_flattened.dtype = np.float32

Expand Down
2 changes: 1 addition & 1 deletion tools/onnx-graphsurgeon/onnx_graphsurgeon/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,4 @@
from onnx_graphsurgeon.ir.tensor import Constant, Tensor, Variable
from onnx_graphsurgeon.util.exception import OnnxGraphSurgeonException

__version__ = "0.3.10"
__version__ = "0.3.11"
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

from onnx_graphsurgeon.ir.graph import Graph


class BaseExporter(object):
@staticmethod
def export_graph(graph: Graph):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,23 +42,27 @@ def export_tensor_proto(tensor: Constant) -> onnx.TensorProto:
onnx_tensor.name = tensor.name
return onnx_tensor


@staticmethod
def export_value_info_proto(tensor: Variable, do_type_check: bool) -> onnx.ValueInfoProto:
if do_type_check and tensor.dtype is None:
G_LOGGER.critical("Graph input and output tensors must include dtype information. Please set the dtype attribute for: {:}".format(tensor))
G_LOGGER.critical(
"Graph input and output tensors must include dtype information. Please set the dtype attribute for: {:}".format(
tensor
)
)

if tensor.dtype is not None:
onnx_tensor = onnx.helper.make_tensor_value_info(tensor.name, dtype_to_onnx(tensor.dtype), tensor.shape)
else:
onnx_tensor = onnx.helper.make_empty_tensor_value_info(tensor.name)
return onnx_tensor


@staticmethod
def export_node(node: Node, do_type_check: bool) -> onnx.NodeProto:
# Cannot pass in attrs directly as make_node will change the order
onnx_node = onnx.helper.make_node(node.op, inputs=[t.name for t in node.inputs], outputs=[t.name for t in node.outputs], name=node.name)
onnx_node = onnx.helper.make_node(
node.op, inputs=[t.name for t in node.inputs], outputs=[t.name for t in node.outputs], name=node.name
)
# Convert Tensors and Graphs to TensorProtos and GraphProtos respectively
for key, val in node.attrs.items():
if isinstance(val, Tensor):
Expand All @@ -68,7 +72,6 @@ def export_node(node: Node, do_type_check: bool) -> onnx.NodeProto:
onnx_node.attribute.extend([onnx.helper.make_attribute(key, val)])
return onnx_node


@staticmethod
def export_graph(graph: Graph, do_type_check=True) -> onnx.GraphProto:
"""
Expand All @@ -83,7 +86,9 @@ def export_graph(graph: Graph, do_type_check=True) -> onnx.GraphProto:
inputs = [OnnxExporter.export_value_info_proto(inp, do_type_check) for inp in graph.inputs]
outputs = [OnnxExporter.export_value_info_proto(out, do_type_check) for out in graph.outputs]
tensor_map = graph.tensors()
initializer = [OnnxExporter.export_tensor_proto(tensor) for tensor in tensor_map.values() if isinstance(tensor, Constant)]
initializer = [
OnnxExporter.export_tensor_proto(tensor) for tensor in tensor_map.values() if isinstance(tensor, Constant)
]

# Remove inputs and outputs to export ValueInfoProtos
for tensor in graph.inputs + graph.outputs:
Expand All @@ -93,9 +98,22 @@ def export_graph(graph: Graph, do_type_check=True) -> onnx.GraphProto:
# Omit tensors from value_info if we don't know their shape/dtype
def has_value_info(tensor):
return isinstance(tensor, Variable) and (tensor.dtype is not None or tensor.shape is not None)
value_info = [OnnxExporter.export_value_info_proto(tensor, do_type_check) for tensor in tensor_map.values() if has_value_info(tensor)]

return onnx.helper.make_graph(nodes=nodes, name=graph.name, inputs=inputs, outputs=outputs, initializer=initializer, doc_string=graph.doc_string, value_info=value_info)
value_info = [
OnnxExporter.export_value_info_proto(tensor, do_type_check)
for tensor in tensor_map.values()
if has_value_info(tensor)
]

return onnx.helper.make_graph(
nodes=nodes,
name=graph.name,
inputs=inputs,
outputs=outputs,
initializer=initializer,
doc_string=graph.doc_string,
value_info=value_info,
)


def export_onnx(graph: Graph, do_type_check=True, **kwargs) -> "onnx.ModelProto":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

from onnx_graphsurgeon.ir.graph import Graph


class BaseImporter(object):
@staticmethod
def import_graph(graph) -> Graph:
Expand Down
Loading

0 comments on commit b06a301

Please sign in to comment.