Op def output (#73)

* fix name of operator output in onnxop.inc and Operator.md

* remove Operators.md
This commit is contained in:
chentong319 2020-02-07 11:10:35 -05:00 committed by GitHub
parent ae297f14ee
commit 60ac8f081f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 184 additions and 162 deletions

View File

@ -13,6 +13,9 @@ ONNF modified this script to import ONNX specifications into ONNF. There are two
5. copy the two files into your ONNF: cp onnxop.inc your_ONNF/src/dialect/onnx/onnxop.inc; cp op_build_table.inc your_ONNF/src/builder 5. copy the two files into your ONNF: cp onnxop.inc your_ONNF/src/dialect/onnx/onnxop.inc; cp op_build_table.inc your_ONNF/src/builder
6. go to your ONNF and build 6. go to your ONNF and build
## Consistency
The Operators.md generated by gen_doc.py is copied into the doc directory. Please refer to this specification, not the one in the onnx GitHub repository, to make sure operators match the version used in onnxop.inc.
## Customization ## Customization
In addition to following the ONNF specification, the modified gen_doc.py provides some mechanism for you to customize the output. In addition to following the ONNF specification, the modified gen_doc.py provides some mechanism for you to customize the output.
Several tables are defined at the beginning of the script: Several tables are defined at the beginning of the script:

View File

@ -120,6 +120,11 @@ def display_version_link(name, version): # type: (Text, int) -> Text
name_with_ver = '{}-{}'.format(name, version) name_with_ver = '{}-{}'.format(name, version)
return '<a href="{}#{}">{}</a>'.format(changelog_md, name_with_ver, name_with_ver) return '<a href="{}#{}">{}</a>'.format(changelog_md, name_with_ver, name_with_ver)
def get_unique_output_name(schema, name):
    """Return a collision-free field name for an op output.

    ONNX allows an operator to use the same identifier for an input and an
    output, but the generated TableGen records need distinct field names, so
    an output whose name matches any input name is prefixed with 'out_'.

    Args:
        schema: an ONNX OpSchema-like object exposing an ``inputs`` sequence
            whose elements each have a ``name`` attribute.
        name: the output name to disambiguate.

    Returns:
        ``'out_' + name`` if *name* collides with an input name, otherwise
        *name* unchanged.
    """
    # NOTE: renamed the loop variable away from `input`, which shadows the
    # Python builtin; `any()` expresses the membership test directly.
    if any(schema_input.name == name for schema_input in schema.inputs):
        return 'out_' + name
    return name
def display_schema(schema, versions): # type: (OpSchema, Sequence[OpSchema]) -> Text def display_schema(schema, versions): # type: (OpSchema, Sequence[OpSchema]) -> Text
s = '' s = ''
@ -223,7 +228,7 @@ def display_schema(schema, versions): # type: (OpSchema, Sequence[OpSchema]) ->
option_str = " (variadic)" option_str = " (variadic)"
else: else:
option_str = " (variadic, heterogeneous)" option_str = " (variadic, heterogeneous)"
s += '<dt><tt>{}</tt>{} : {}</dt>\n'.format(output.name, option_str, output.typeStr) s += '<dt><tt>{}</tt>{} : {}</dt>\n'.format(get_unique_output_name(schema, output.name), option_str, output.typeStr)
s += '<dd>{}</dd>\n'.format(output.description) s += '<dd>{}</dd>\n'.format(output.description)
s += '</dl>\n' s += '</dl>\n'
@ -302,7 +307,6 @@ def collect_types(schema, input) :
return allowedTypeStr return allowedTypeStr
def gen_schema(schema) : def gen_schema(schema) :
skip_attr_gen = []
line_indent = ' ' line_indent = ' '
#s = 'def ONNX'+schema.name+str(schema.since_version)+'Op:ONNX_Op<"'+schema.name+'", \n' #s = 'def ONNX'+schema.name+str(schema.since_version)+'Op:ONNX_Op<"'+schema.name+'", \n'
@ -368,8 +372,7 @@ def gen_schema(schema) :
#TODO handle (variadic, heterogeneous)" #TODO handle (variadic, heterogeneous)"
t='' t=''
s+=':$'+input.name s+=':$'+input.name
if not schema.name in skip_attr_gen : s += gen_attr_ins(schema, isfirst)
s += gen_attr_ins(schema, isfirst)
s+= ');' s+= ');'
#output #output
@ -377,14 +380,14 @@ def gen_schema(schema) :
if schema.outputs: if schema.outputs:
for output in schema.outputs: for output in schema.outputs:
if output != schema.outputs[0] : if output != schema.outputs[0] :
s+= ', ' s+= ',\n '
#need to interpret output.typeStr #need to interpret output.typeStr
etypes=collect_types(schema, output) etypes=collect_types(schema, output)
if etypes == '': if etypes == '':
s+= 'AnyTypeOf<[AnyMemRef, AnyTensor]>' s+= 'AnyTypeOf<[AnyMemRef, AnyTensor]>'
else: else:
s+= 'TensorOf<['+etypes+']>' s+= 'TensorOf<['+etypes+']>'
s += ':$o_'+output.name s += ':$'+get_unique_output_name(schema, output.name)
s+= ');\n' s+= ');\n'
#s+= 'let hasCanonicalizer = 1;' #s+= 'let hasCanonicalizer = 1;'

File diff suppressed because it is too large Load Diff