2021-05-03 22:34:35 +00:00
|
|
|
# Copyright (c) Microsoft Corporation. All rights reserved.
|
|
|
|
|
# Licensed under the MIT License.
|
|
|
|
|
|
|
|
|
|
import argparse
|
2022-09-20 21:24:59 +00:00
|
|
|
import contextlib
|
2021-05-03 22:34:35 +00:00
|
|
|
import os
|
|
|
|
|
import sys
|
|
|
|
|
import typing
|
|
|
|
|
|
|
|
|
|
# the import of FbsTypeInfo sets up the path so we can import ort_flatbuffers_py
|
2022-09-20 21:24:59 +00:00
|
|
|
from util.ort_format_model.types import FbsTypeInfo # isort:skip
|
|
|
|
|
import ort_flatbuffers_py.fbs as fbs # isort:skip
|
2021-05-03 22:34:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class OrtFormatModelDumper:
    """Dump the contents of an ORT format model as text."""

    def __init__(self, model_path: str):
        """
        Initialize ORT format model dumper.

        :param model_path: Path to model
        :raises RuntimeError: If the file does not carry the ORT format buffer identifier.
        """
        # Read the whole file up front. Using 'with' ensures the file handle is
        # closed (the previous implementation leaked it: open(...).read()).
        with open(model_path, "rb") as model_file:
            self._file = model_file.read()

        self._buffer = bytearray(self._file)

        if not fbs.InferenceSession.InferenceSession.InferenceSessionBufferHasIdentifier(self._buffer, 0):
            raise RuntimeError(f"File does not appear to be a valid ORT format model: '{model_path}'")

        self._inference_session = fbs.InferenceSession.InferenceSession.GetRootAsInferenceSession(self._buffer, 0)
        self._model = self._inference_session.Model()

    def _dump_initializers(self, graph: fbs.Graph):
        """Print name, data type and dims for each initializer in the graph."""
        print("Initializers:")
        for idx in range(graph.InitializersLength()):
            tensor = graph.Initializers(idx)
            dims = [tensor.Dims(dim) for dim in range(tensor.DimsLength())]
            print(f"{tensor.Name().decode()} data_type={tensor.DataType()} dims={dims}")
        print("--------")

    def _dump_nodeargs(self, graph: fbs.Graph):
        """Print name, type string and shape (when available) for each NodeArg in the graph."""
        print("NodeArgs:")
        for idx in range(graph.NodeArgsLength()):
            node_arg = graph.NodeArgs(idx)
            # renamed from 'type' to avoid shadowing the builtin
            node_arg_type = node_arg.Type()
            if not node_arg_type:
                # NodeArg for optional value that does not exist
                continue

            type_str = FbsTypeInfo.typeinfo_to_str(node_arg_type)
            value_type = node_arg_type.ValueType()
            value = node_arg_type.Value()
            dims = None
            if value_type == fbs.TypeInfoValue.TypeInfoValue.tensor_type:
                tensor_type_and_shape = fbs.TensorTypeAndShape.TensorTypeAndShape()
                tensor_type_and_shape.Init(value.Bytes, value.Pos)
                shape = tensor_type_and_shape.Shape()

                if shape:
                    dims = []
                    for dim in range(shape.DimLength()):
                        d = shape.Dim(dim).Value()
                        if d.DimType() == fbs.DimensionValueType.DimensionValueType.VALUE:
                            # concrete dimension value
                            dims.append(str(d.DimValue()))
                        elif d.DimType() == fbs.DimensionValueType.DimensionValueType.PARAM:
                            # symbolic dimension name
                            dims.append(d.DimParam().decode())
                        else:
                            # unknown dimension
                            dims.append("?")

            print(f"{node_arg.Name().decode()} type={type_str} dims={dims}")
        print("--------")

    def _dump_node(self, node: fbs.Node):
        """Print one line summarizing the node: index, name, op, opset version and I/O names."""
        optype = node.OpType().decode()
        domain = node.Domain().decode() or "ai.onnx"  # empty domain defaults to ai.onnx
        since_version = node.SinceVersion()

        inputs = [node.Inputs(i).decode() for i in range(node.InputsLength())]
        outputs = [node.Outputs(i).decode() for i in range(node.OutputsLength())]
        print(
            f"{node.Index()}:{node.Name().decode()}({domain}:{optype}:{since_version}) "
            f'inputs=[{",".join(inputs)}] outputs=[{",".join(outputs)}]'
        )

    def _dump_graph(self, graph: fbs.Graph):
        """
        Process one level of the Graph, descending into any subgraphs when they are found
        """
        self._dump_initializers(graph)
        self._dump_nodeargs(graph)
        print("Nodes:")
        for i in range(graph.NodesLength()):
            node = graph.Nodes(i)
            self._dump_node(node)

            # Read all the attributes, recursing into any GRAPH/GRAPHS attributes.
            for j in range(node.AttributesLength()):
                attr = node.Attributes(j)
                attr_type = attr.Type()
                if attr_type == fbs.AttributeType.AttributeType.GRAPH:
                    print(f"## Subgraph for {node.OpType().decode()}.{attr.Name().decode()} ##")
                    self._dump_graph(attr.G())
                    print(f"## End {node.OpType().decode()}.{attr.Name().decode()} Subgraph ##")
                elif attr_type == fbs.AttributeType.AttributeType.GRAPHS:
                    # the ONNX spec doesn't currently define any operators that have multiple graphs in an attribute
                    # so entering this 'elif' isn't currently possible
                    print(f"## Subgraphs for {node.OpType().decode()}.{attr.Name().decode()} ##")
                    for k in range(attr.GraphsLength()):
                        print(f"## Subgraph {k} ##")
                        self._dump_graph(attr.Graphs(k))
                        print(f"## End Subgraph {k} ##")

    def dump(self, output: typing.IO):
        """Write the full model dump to *output* by redirecting stdout."""
        with contextlib.redirect_stdout(output):
            print(f"ORT format version: {self._inference_session.OrtVersion().decode()}")
            print("--------")

            graph = self._model.Graph()
            self._dump_graph(graph)
|
2021-05-03 22:34:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_args():
    """Parse and validate command line arguments.

    :return: Parsed arguments; ``model_path`` is guaranteed to name an existing file.
    """
    arg_parser = argparse.ArgumentParser(
        os.path.basename(__file__),
        description="Dump an ORT format model. Output is to <model_path>.txt",
    )
    arg_parser.add_argument("--stdout", action="store_true", help="Dump to stdout instead of writing to file.")
    arg_parser.add_argument("model_path", help="Path to ORT format model")

    parsed = arg_parser.parse_args()

    # Fail early with a usage message rather than erroring later on open().
    if not os.path.isfile(parsed.model_path):
        arg_parser.error(f"{parsed.model_path} is not a file.")

    return parsed
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Entry point: dump the model to stdout, or to <model_path>.txt next to the model."""
    args = parse_args()
    dumper = OrtFormatModelDumper(args.model_path)

    if args.stdout:
        dumper.dump(sys.stdout)
        return

    # Default: write the dump alongside the model file.
    with open(args.model_path + ".txt", "w", encoding="utf-8") as out_file:
        dumper.dump(out_file)
|
|
|
|
|
|
|
|
|
|
|
2022-04-26 16:35:16 +00:00
|
|
|
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|