add mlir file support, add test

This commit is contained in:
colin 2020-08-11 17:18:53 +08:00
parent dfa6433952
commit df31b3aa14
9 changed files with 2502 additions and 2314 deletions

View File

@ -8,7 +8,7 @@ TextMate grammar used is based on the one from the official [LLVM repository](ht
## Features ## Features
- Syntax highlighting - Syntax highlighting, Support .inc .mlir .td
## Build ## Build

BIN
mlir-highlight-0.0.3.vsix Normal file

Binary file not shown.

View File

@ -1,8 +1,8 @@
{ {
"name": "mlir-tablegen", "name": "mlir-highlight",
"displayName": "MLIR TableGen", "displayName": "MLIR highlight",
"description": "Syntax highlighting for TableGen.", "description": "Syntax highlighting for MLIR.",
"version": "0.0.2", "version": "0.0.3",
"publisher": "colin-liang", "publisher": "colin-liang",
"engines": { "engines": {
"vscode": "^1.22.0" "vscode": "^1.22.0"
@ -15,7 +15,8 @@
"mlir-tblgen", "mlir-tblgen",
"tblgen", "tblgen",
"llvm", "llvm",
"tablegen" "tablegen",
"mlir"
], ],
"contributes": { "contributes": {
"languages": [ "languages": [
@ -41,6 +42,17 @@
".td" ".td"
], ],
"configuration": "./language-configuration.json" "configuration": "./language-configuration.json"
},
{
"id": "mlir",
"aliases": [
"MLIR",
"mlir"
],
"extensions": [
".mlir"
],
"configuration": "./language-configuration.json"
} }
], ],
"grammars": [ "grammars": [
@ -53,6 +65,11 @@
"language": "tablegen", "language": "tablegen",
"scopeName": "source.tablegen", "scopeName": "source.tablegen",
"path": "./syntaxes/tablegen.tmLanguage" "path": "./syntaxes/tablegen.tmLanguage"
},
{
"language": "mlir",
"scopeName": "source.mlir",
"path": "./syntaxes/mlir.json"
} }
] ]
}, },

590
syntaxes/mlir.json Normal file
View File

@ -0,0 +1,590 @@
{
"fileTypes": [
"mlir"
],
"repository": {
"attribute": {
"match": "\\W[\\w_][\\w\\d_.$]*\\s*=",
"name": "meta.attribute.mlir"
},
"branch_target": {
"match": "\\^bb[\\w\\d_$\\.-]+",
"name": "entity.name.label.mlir"
},
"comment": {
"match": "\/\/.*$",
"name": "comment.line.double-slash.mlir"
},
"identifier": {
"match": "[\\%#@][\\w_][\\w\\d_.$]*",
"captures": {
"0": {
"name": "variable.mlir"
}
},
"name": "meta.identifier.mlir"
},
"numbers": {
"match": "(?<=\\W|e|E)(-)?([0-9]+)(((x|-)[0-9]+)*|\\.[0-9]+)(?=\\W|e|E)",
"name": "constant.numeric.mlir"
},
"string": {
"end": "\"",
"begin": "\"",
"beginCaptures": {
"0": {
"name": "punctuation.definition.string.begin.mlir"
}
},
"patterns": [
{
"match": "\\\\[nt\"]",
"name": "constant.character.escape.mlir"
},
{
"match": "\\\\.",
"name": "invalid.illegal.mlir"
}
],
"endCaptures": {
"0": {
"name": "punctuation.definition.string.end.mlir"
}
},
"name": "string.quoted.double.mlir"
},
"types": {
"match": "\\b(index|i[1-9][0-9]*|f16|bf16|f32|f64|u8|ui32|si32|memref|tensor|vector)\\b",
"captures": {
"1": {
"name": "storage.type.mlir"
}
},
"name": "meta.types.simple.mlir"
},
"memref-size": {
"match": "(?<=<)\\s*(((\\?|[1-9][0-9]*)\\s*x\\s*)*)(i[1-9][0-9]*|f16|bf16|f32|f64|u8|ui32|si32|!quant\\.uniform|vector|tensor|memref|!)\\b",
"captures":{
"0":{
"name": "storage.type.mlir.size"
},
"1":{
"name":"constant.numeric.size.mlir"
}
},
"name": "meta.type.mlir.size"
},
"affineOps": {
"match": "\\baffine\\.(for|apply|if|load|store|dma_start|dma_wait|min|max|parallel|prefetch)\\b",
"name": "entity.name.function.mlir.affineOps"
},
"affineOpsP": {
"match": "(?<=\\W)\"affine\\.(for|apply|if|load|store|dma_start|dma_wait|min|max|parallel|prefetch)\"(?=\\W)",
"name": "entity.name.function.mlir.affineOpsP"
},
"affineStructures": {
"match": "\\baffine_(map|set)\\b",
"name": "entity.name.function.mlir.affineStructures"
},
"llvmType": {
"match": "(^!llvm|\\s!llvm)[\\.<]",
"captures": {
"1": {
"name": "variable.mlir"
}
},
"name": "meta.types.llvm.mlir"
},
"llvmFunc": {
"match": "\\bllvm\\.(?=func)",
"name": "keyword.function.llvm"
},
"llvmReturn": {
"match": "\\bllvm\\.return\\b",
"name": "keyword.return.llvm"
},
"llvmIntArith": {
"match": "\\bllvm\\.(add|sub|mul|udiv|sdiv|urem|srem)\\b",
"name": "entity.name.function.mlir.llvmIntArithm"
},
"llvmFloatArith": {
"match": "\\bllvm\\.(fadd|fsub|fmul|fdiv|frem)\\b",
"name": "entity.name.function.mlir.llvmFloatArith"
},
"llvmMemOp": {
"match": "\\bllvm\\.(alloca|getelementptr|load|store)\\b",
"name": "entity.name.function.mlir.llvmMemOp"
},
"llvmAggregateOp": {
"match": "\\bllvm\\.(extractvalue|insertvalue)\\b",
"name": "entity.name.function.mlir.llvmAggregateOp"
},
"llvmTerminatorOp": {
"match": "\\bllvm\\.(br|cond_br|call)\\b",
"name": "entity.name.function.mlir.llvmTerminatorOp"
},
"llvmComparison": {
"match": "\\bllvm\\.(eq|ne|slt|sle|sgt|sge|ult|ule|ugt|uge|bitcast|select|icmp)\\b",
"name": "entity.name.function.mlir.llvmComparison"
},
"llvmOps":{
"match": "\\bllvm\\.(extractelement|insertelement|shufflevector)\\b",
"name":"entity.name.function.mlir.llvmOps"
},
"llvmMLIR": {
"match": "\\b(llvm\\.mlir\\.)((addressof|constant|global|null|undef))",
"captures": {
"1": {
"name": "keyword.other.llvm"
},
"2": {
"name": "entity.name.function.mlir.auxilary"
}
},
"name": "meta.llvm.mlirAuxilary"
},
"gpuFunc": {
"match": "\\bgpu\\.(?=func)",
"name": "keyword.function.mlir.gpu"
},
"gpuReturn": {
"match": "\\bgpu\\.return\\b",
"name": "keyword.return.mlir.gpu"
},
"gpuModules": {
"match": "\\bgpu\\.(module|container_module|kernel_module)\\b",
"name": "keyword.other.mlir.gpu-modules"
},
"gpuKernel": {
"match": "\\bgpu\\.kernel\\b",
"name": "keyword.other.mlir.gpu-kernel"
},
"gpuOpsP": {
"match": "(?<=\\W)\"gpu\\.(launch_func|thread_id|block_dim|block_id|grid_dim|all_reduce|yield|shuffle|barrier)\"(?=\\W)",
"name": "entity.name.function.mlir.gpuOpsP"
},
"gpuOps":{
"match": "\\bgpu\\.(launch_func|thread_id|block_dim|block_id|grid_dim|all_reduce|yield|shuffle|barrier)\\b",
"name": "entity.name.function.mlir.gpuOps"
},
"gpuLaunchAndTerminator": {
"match": "\\bgpu\\.(launch|terminator)\\b",
"name": "keyword.other.mlir.gpu-launchAndTerminator"
},
"gpuTestPromoteWorkgroup": {
"match": "\\bgpu\\.test_promote_workgroup\\b",
"name": "keyword.other.mlir.gpu-testPromoteWorkgroup"
},
"nvvmID": {
"match": "\\bnvvm\\.read\\.ptx\\.sreg\\.(tid|ntid|ctaid|nctaid)\\.(x|y|z)\\b",
"name": "entity.name.function.mlir.nvvm-id"
},
"nvvmLaneId": {
"match": "\\bnvvm\\.read\\.ptx\\.sreg\\.laneid\\b",
"name": "entity.name.function.mlir.nvvm-laneid"
},
"nvvmBarrier0": {
"match": "\\bnvvm\\.barrier0\\b",
"name": "entity.name.function.mlir.nvvm-barrier0"
},
"nvvmMma": {
"match": "\\bnvvm\\.mma\\.sync\\b",
"name": "entity.name.function.mlir.nvvm-mma"
},
"nvvmShflBfly": {
"match": "\\bnvvm\\.shfl\\.sync\\.bfly\\b",
"name": "entity.name.function.mlir.nvvm-shflbfly"
},
"nvvmVoteBallot": {
"match": "\\bnvvm\\.vote\\.ballot\\.sync\\b",
"name": "entity.name.function.mlir.nvvm-voteballot"
},
"nvvmWarpSize": {
"match": "\\bnvvm\\.read\\.ptx\\.sreg\\.warpsize\\b",
"name": "entity.name.function.mlir.warpsize"
},
"tflMath": {
"match": "(?<=\\W)\"tfl\\.(abs|add_n|add|cos|div|exp|floor_div|floor_mod|floor|log|log_softmax|mul|pow|round|rsqrt|sin|softmax|sqrt|square|squared_difference|sub|sum|tanh)\"(?=\\W)",
"name": "entity.name.function.mlir.tfl_math"
},
"tflLogic":{
"match": "(?<=\\W)\"tfl\\.(equal|greater_equal|greater|less_equal|less|logical_and|logical_not|logical_or|neg|not_equal|select|select_v2|where)\"(?=\\W)",
"name": "entity.name.function.mlir.tflLogic"
},
"tflStats": {
"match": "(?<=\\W)\"tfl\\.(arg_max|arg_min|average_pool_2d|max_pool_2d|max_pooling_with_argmax_2d|max_unpooling_2d|maximum|mean|minimum|non_max_suppression_(v4|v5))\"(?=\\W)",
"name": "entity.name.function.mlir.tflStats"
},
"tflConv": {
"match": "(?<=\\W)\"tfl\\.(conv_2d|convolution_2d_transpose_bias|depthwise_conv_2d|transpose_conv)\"(?=\\W)",
"name": "entity.name.function.mlir.tflConv"
},
"tflLSTM": {
"match": "(?<=\\W)\"tfl\\.(basic_lstm|lstm|unidirectional_sequence_lstm)\"(?=\\W)",
"name": "entity.name.function.mlir.tflLSTM"
},
"tflPseudo": {
"match": "(?<=\\W)\"tfl\\.(pseudo_const|pseudo_qconst|pseudo_sparse_const|pseudo_sparse_qconst)\"(?=\\W)",
"name": "entity.name.function.mlir.tflPseudo"
},
"tflTransformations": {
"match": "(?<=\\W)\"tfl\\.(batch_to_space_nd|depth_to_space|expand_dims|resize_bilinear|resize_nearest_neighbor|space_to_batch_nd|space_to_depth|sparse_to_dense)\"(?=\\W)",
"name": "entity.name.function.mlir.tflReshaping"
},
"tflRELU": {
"match": "(?<=\\W)\"tfl\\.(elu|leaky_relu|prelu|relu_n1_to_1|relu6|relu)\"(?=\\W)",
"name": "entity.name.function.mlir.tflRELU"
},
"tflMatrix": {
"match": "(?<=\\W)\"tfl\\.(matrix_diag|matrix_set_diag|mirror_pad|pad|padv2|rank|transpose)\"(?=\\W)",
"name": "entity.name.function.mlir.tflMatrix"
},
"tflOps": {
"match": "(?<=\\W)\"tfl\\.(cast|ceil|concatenation|densify|dequantize|fill|gather_nd|gather|logistic|pack|quantize|range|reshape|svdf|shape|slice|split|split_v|squeeze|tile|unique|unpack|while|yield)\"(?=\\W)",
"name": "entity.name.function.mlir.tflOps"
},
"tflLongOps": {
"match": "(?<=\\W)\"tfl\\.(embedding_lookup|external_const|fake_quant|fully_connected|hard_swish|NumericVerify|one_hot|segment_sum|strided_slice|topk_v2|zeros_like)\"(?=\\W)",
"name": "entity.name.function.mlir.tflLongOps"
},
"tflNormalization": {
"match": "(?<=\\W)\"tfl\\.(l2_normalization|local_response_normalization)\"(?=\\W)",
"name": "entity.name.function.mlir.tflNormalization"
},
"tflReduce": {
"match": "(?<=\\W)\"tfl\\.(reduce_any|reduce_max|reduce_min|reduce_prod)\"(?=\\W)",
"name": "entity.name.function.mlir.tflReduce"
},
"tflSequence": {
"match": "(?<=\\W)\"tfl\\.(reverse_sequence|reverse_v2|unidirectional_sequence_rnn)\"(?=\\W)",
"name": "entity.name.function.mlir.tflSequence"
},
"vectorOps":{
"match": "\\Wvector\\.(broadcast|contract|vectorfma|print|constant_mask|create_mask|shuffle|matrix_multiply|outerproduct|reduction|strided_slice|transpose|type_cast)\\b",
"name":"entity.name.function.mlir.vectorOps"
},
"vectorExtract":{
"match" : "\\Wvector\\.(extract|extractelement|extract_slices)\\b",
"name": "entity.name.function.mlir.vectorExtract"
},
"vectorInsert":{
"match" : "\\Wvector\\.(insert|insertelement|insert_slices|insert_strided_slice)\\b",
"name" : "entity.name.function.mlir.vectorInsert"
},
"vectorReshape":{
"match" : "\\Wvector\\.(reshape|shape_cast)\\b",
"name": "entity.name.function.mlir.vectorReshape"
},
"vectorTransfer":{
"match" : "\\Wvector\\.transfer_(read|write)\\b",
"name" : "entity.name.function.mlir.vectorTransfer"
},
"vectorTuple":{
"match" : "\\Wvector\\.(tuple|tuple_get)\\b",
"name" : "entity.name.function.mlir.vectorTuple"
},
"loopOps":{
"match" : "\\bloop\\.(for|if|parallel|reduce|yield)\\b",
"name": "entity.name.function.mlir.loopOps"
},
"tileFunctions":{
"match" : "\\btile\\.(constant|contract|index|reshape)\\b",
"name": "entity.name.function.mlir.tileFunctions"
},
"tileQuotedFunctions":{
"match" : "(?<=\\W)\"tile\\.(constant|contract|index|reshape)\"(?=\\W)",
"name": "entity.name.function.mlir.tileFunctions"
},
"tileKeywords": {
"match": "\\btile\\.(name)\\b",
"name": "keyword.other.mlir.tileKeywords"
},
"eltwiseFunctions":{
"match" : "(?<=\\W)\"eltwise\\.(add|div|sconst|cmp_lt|select|sub|exp|cast|neg|mul|sqrt|ident)\"(?=\\W)",
"name": "entity.name.function.mlir.tileFunctions"
},
"CHECK": {
"match": "(\/\/)\\s*(CHECK\\s*:|CHECK-\\w+\\s*:)(.*)$",
"captures": {
"1": {
"name": "comment.line.double-slash.mlir"
},
"2": {
"name": "comment.other.CHECK.mlir"
},
"3": {
"patterns": [
{
"include": "#CHECK-CODE"
}
]
}
},
"name": "comment.other.CHECK.mlir"
},
"CHECK-CODE": {
"match": "([^0-9a-zA-Z%\"@]*)?([0-9a-zA-Z\"\\.%_\\-@]*)([^0-9a-zA-Z\"\\.%_\\-].*)?$",
"captures": {
"1": {
"name": "comment.line.double-slash.mlir"
},
"2": {
"patterns": [
{
"include": "$self"
},
{
"match": "(\\b|x)(index|i[1-9][0-9]*|f16|bf16|f32|f64|u8|memref|tensor|vector|func)\\b",
"captures": {
"1": {
"name": "comment.line.double-slash.mlir"
},
"2": {
"name": "storage.type.mlir"
}
}
},
{
"match": "\\b([0-9]+)(x.*)?",
"captures": {
"1": {
"name": "constant.numeric.mlir"
},
"2": {
"patterns": [
{
"include": "$self"
}
]
}
}
},
{
"match": "(.*)",
"name": "comment.line.double-slash.mlir"
}
]
},
"3": {
"patterns": [
{
"include": "#CHECK-CODE"
}
]
}
}
}
},
"patterns": [
{
"match": "\\b(func)\\b\\s*(@[\\w_][\\w\\d_.$]*)",
"captures": {
"1": {
"name": "keyword.function.mlir"
},
"2": {
"name": "entity.name.function.mlir"
}
},
"name": "support.function.mlir"
},
{
"match": "\\b(attributes|br|call|constant|loc|return)\\b",
"name": "keyword.module.mlir"
},
{
"include": "#identifier"
},
{
"include": "#branch_target"
},
{
"include": "#attribute"
},
{
"include": "#memref-size"
},
{
"include": "#numbers"
},
{
"include": "#affineOps"
},
{
"include": "#affineOpsP"
},
{
"include": "#affineStructures"
},
{
"include": "#else"
},
{
"include": "#CHECK"
},
{
"include": "#llvmType"
},
{
"include": "#llvmFunc"
},
{
"include": "#llvmReturn"
},
{
"include": "#llvmIntArith"
},
{
"include": "#llvmFloatArith"
},
{
"include": "#llvmMemOp"
},
{
"include": "#llvmAggregateOp"
},
{
"include": "#llvmTerminatorOp"
},
{
"include": "#llvmComparison"
},
{
"include": "#llvmMLIR"
},
{
"include": "#llvmOps"
},
{
"include": "#gpuFunc"
},
{
"include": "#gpuReturn"
},
{
"include": "#gpuModules"
},
{
"include": "#gpuKernel"
},
{
"include": "#gpuOpsP"
},
{
"include": "#gpuOps"
},
{
"include": "#gpuLaunchAndTerminator"
},
{
"include": "#gpuTestPromoteWorkgroup"
},
{
"include": "#nvvmID"
},
{
"include": "#nvvmLaneId"
},
{
"include": "#nvvmBarrier0"
},
{
"include": "#nvvmMma"
},
{
"include": "#nvvmShflBfly"
},
{
"include": "#nvvmVoteBallot"
},
{
"include": "#nvvmWarpSize"
},
{
"include": "#tileFunctions"
},
{
"include": "#tileQuotedFunctions"
},
{
"include": "#tileKeywords"
},
{
"include": "#eltwiseFunctions"
},
{
"include": "#tflMath"
},
{
"include": "#tflLogic"
},
{
"include": "#tflStats"
},
{
"include": "#tflConv"
},
{
"include": "#tflLSTM"
},
{
"include": "#tflPseudo"
},
{
"include": "#tflTransformations"
},
{
"include": "#tflRELU"
},
{
"include": "#tflMatrix"
},
{
"include": "#tflOps"
},
{
"include": "#tflLongOps"
},
{
"include": "#tflNormalization"
},
{
"include": "#tflReduce"
},
{
"include": "#tflSequence"
},
{
"include": "#vectorOps"
},
{
"include": "#vectorExtract"
},
{
"include":"#vectorInsert"
},
{
"include":"#vectorReshape"
},
{
"include":"#vectorTransfer"
},
{
"include": "#vectorTuple"
},
{
"include": "#loopOps"
},
{
"include": "#comment"
},
{
"include": "#types"
},
{
"include": "#string"
}
],
"name": "MLIR",
"scopeName": "source.mlir"
}

21
test/a.mlir Normal file
View File

@ -0,0 +1,21 @@
module {
func @multiply_transpose(%arg0: tensor<*xf64>, %arg1: tensor<*xf64>) -> tensor<*xf64> {
%0 = toy.transpose(%arg0 : tensor<*xf64>) to tensor<*xf64>
%1 = toy.transpose(%arg1 : tensor<*xf64>) to tensor<*xf64>
%2 = toy.mul %0, %1 : tensor<*xf64>
toy.return %2 : tensor<*xf64>
}
func @main() {
%0 = toy.constant dense<[[1.000000e+00, 2.000000e+00, 3.000000e+00], [4.000000e+00, 5.000000e+00, 6.000000e+00]]> : tensor<2x3xf64>
%1 = toy.constant dense<[1.000000e+00, 2.000000e+00, 3.000000e+00, 4.000000e+00, 5.000000e+00, 6.000000e+00]> : tensor<6xf64>
%2 = toy.reshape(%1 : tensor<6xf64>) to tensor<2x3xf64>
%3 = toy.generic_call @multiply_transpose(%0, %2) : (tensor<2x3xf64>, tensor<2x3xf64>) -> tensor<*xf64>
%4 = toy.generic_call @multiply_transpose(%2, %0) : (tensor<2x3xf64>, tensor<2x3xf64>) -> tensor<*xf64>
%5 = toy.generic_call @multiply_transpose(%2, %3) : (tensor<2x3xf64>, tensor<*xf64>) -> tensor<*xf64>
%6 = toy.transpose(%0 : tensor<2x3xf64>) to tensor<*xf64>
%7 = toy.generic_call @multiply_transpose(%6, %3) : (tensor<*xf64>, tensor<*xf64>) -> tensor<*xf64>
toy.return
}
}

251
test/b.td Normal file
View File

@ -0,0 +1,251 @@
//===- Ops.td - Toy dialect operation definitions ----------*- tablegen -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Defines the operations of the Toy dialect.
//
//===----------------------------------------------------------------------===//
#ifndef TOY_OPS
#define TOY_OPS
include "mlir/IR/OpBase.td"
include "mlir/Interfaces/SideEffectInterfaces.td"
// Provide a definition of the 'toy' dialect in the ODS framework so that we
// can define our operations.
def Toy_Dialect : Dialect {
let name = "toy";
let cppNamespace = "toy";
}
// Base class for toy dialect operations. This operation inherits from the base
// `Op` class in OpBase.td, and provides:
// * The parent dialect of the operation.
// * The mnemonic for the operation, or the name without the dialect prefix.
// * A list of traits for the operation.
class Toy_Op<string mnemonic, list<OpTrait> traits = []> :
Op<Toy_Dialect, mnemonic, traits>;
//===----------------------------------------------------------------------===//
// Toy Operations
//===----------------------------------------------------------------------===//
// We define a toy operation by inheriting from our base 'Toy_Op' class above.
// Here we provide the mnemonic and a list of traits for the operation. The
// constant operation is marked as 'NoSideEffect' as it is a pure operation
// and may be removed if dead.
def ConstantOp : Toy_Op<"constant", [NoSideEffect]> {
// Provide a summary and description for this operation. This can be used to
// auto-generate documentation of the operations within our dialect.
let summary = "constant";
let description = [{
Constant operation turns a literal into an SSA value. The data is attached
to the operation as an attribute. For example:
```mlir
%0 = toy.constant dense<[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]>
: tensor<2x3xf64>
```
}];
// The constant operation takes an attribute as the only input.
let arguments = (ins F64ElementsAttr:$value);
// The constant operation returns a single value of TensorType.
let results = (outs F64Tensor);
// Specify a parser and printer method.
let parser = [{ return ::parseConstantOp(parser, result); }];
let printer = [{ return ::print(p, *this); }];
// Add custom build methods for the constant operation. These method populates
// the `state` that MLIR uses to create operations, i.e. these are used when
// using `builder.create<ConstantOp>(...)`.
let builders = [
// Build a constant with a given constant tensor value.
OpBuilder<"OpBuilder &builder, OperationState &state, "
"DenseElementsAttr value", [{
build(builder, state, value.getType(), value);
}]>,
// Build a constant with a given constant floating-point value.
OpBuilder<"OpBuilder &builder, OperationState &state, double value">
];
// Invoke a static verify method to verify this constant operation.
let verifier = [{ return ::verify(*this); }];
}
def AddOp : Toy_Op<"add"> {
let summary = "element-wise addition operation";
let description = [{
The "add" operation performs element-wise addition between two tensors.
The shapes of the tensor operands are expected to match.
}];
let arguments = (ins F64Tensor:$lhs, F64Tensor:$rhs);
let results = (outs F64Tensor);
// Specify a parser and printer method.
let parser = [{ return ::parseBinaryOp(parser, result); }];
let printer = [{ return ::printBinaryOp(p, *this); }];
// Allow building an AddOp with from the two input operands.
let builders = [
OpBuilder<"OpBuilder &b, OperationState &state, Value lhs, Value rhs">
];
}
def GenericCallOp : Toy_Op<"generic_call"> {
let summary = "generic call operation";
let description = [{
Generic calls represent calls to a user defined function that needs to
be specialized for the shape of its arguments. The callee name is attached
as a symbol reference via an attribute. The arguments list must match the
arguments expected by the callee. For example:
```mlir
%4 = toy.generic_call @my_func(%1, %3)
: (tensor<2x3xf64>, tensor<2x3xf64>) -> tensor<*xf64>
```
This is only valid if a function named "my_func" exists and takes two
arguments.
}];
// The generic call operation takes a symbol reference attribute as the
// callee, and inputs for the call.
let arguments = (ins FlatSymbolRefAttr:$callee, Variadic<F64Tensor>:$inputs);
// The generic call operation returns a single value of TensorType.
let results = (outs F64Tensor);
// Specialize assembly printing and parsing using a declarative format.
let assemblyFormat = [{
$callee `(` $inputs `)` attr-dict `:` functional-type($inputs, results)
}];
// Add custom build methods for the generic call operation.
let builders = [
OpBuilder<"OpBuilder &builder, OperationState &state, "
"StringRef callee, ArrayRef<Value> arguments">
];
}
def MulOp : Toy_Op<"mul"> {
let summary = "element-wise multiplication operation";
let description = [{
The "mul" operation performs element-wise multiplication between two
tensors. The shapes of the tensor operands are expected to match.
}];
let arguments = (ins F64Tensor:$lhs, F64Tensor:$rhs);
let results = (outs F64Tensor);
// Specify a parser and printer method.
let parser = [{ return ::parseBinaryOp(parser, result); }];
let printer = [{ return ::printBinaryOp(p, *this); }];
// Allow building a MulOp with from the two input operands.
let builders = [
OpBuilder<"OpBuilder &b, OperationState &state, Value lhs, Value rhs">
];
}
def PrintOp : Toy_Op<"print"> {
let summary = "print operation";
let description = [{
The "print" builtin operation prints a given input tensor, and produces
no results.
}];
// The print operation takes an input tensor to print.
let arguments = (ins F64Tensor:$input);
let assemblyFormat = "$input attr-dict `:` type($input)";
}
def ReshapeOp : Toy_Op<"reshape"> {
let summary = "tensor reshape operation";
let description = [{
Reshape operation is transforming its input tensor into a new tensor with
the same number of elements but different shapes. For example:
```mlir
%0 = toy.reshape (%arg1 : tensor<10xf64>) to tensor<5x2xf64>
```
}];
let arguments = (ins F64Tensor:$input);
// We expect that the reshape operation returns a statically shaped tensor.
let results = (outs StaticShapeTensorOf<[F64]>);
let assemblyFormat = [{
`(` $input `:` type($input) `)` attr-dict `to` type(results)
}];
}
def ReturnOp : Toy_Op<"return", [NoSideEffect, HasParent<"FuncOp">,
Terminator]> {
let summary = "return operation";
let description = [{
The "return" operation represents a return operation within a function.
The operation takes an optional tensor operand and produces no results.
The operand type must match the signature of the function that contains
the operation. For example:
```mlir
func @foo() -> tensor<2xf64> {
...
toy.return %0 : tensor<2xf64>
}
```
}];
// The return operation takes an optional input operand to return. This
// value must match the return type of the enclosing function.
let arguments = (ins Variadic<F64Tensor>:$input);
// The return operation only emits the input in the format if it is present.
let assemblyFormat = "($input^ `:` type($input))? attr-dict ";
// Allow building a ReturnOp with no return operand.
let builders = [OpBuilder<
"OpBuilder &b, OperationState &state", [{ build(b, state, llvm::None); }]
>];
// Provide extra utility definitions on the c++ operation class definition.
let extraClassDeclaration = [{
bool hasOperand() { return getNumOperands() != 0; }
}];
// Invoke a static verify method to verify this return operation.
let verifier = [{ return ::verify(*this); }];
}
def TransposeOp : Toy_Op<"transpose"> {
let summary = "transpose operation";
let arguments = (ins F64Tensor:$input);
let results = (outs F64Tensor);
let assemblyFormat = [{
`(` $input `:` type($input) `)` attr-dict `to` type(results)
}];
// Allow building a TransposeOp with from the input operand.
let builders = [
OpBuilder<"OpBuilder &b, OperationState &state, Value input">
];
// Invoke a static verify method to verify this transpose operation.
let verifier = [{ return ::verify(*this); }];
}
#endif // TOY_OPS

1279
test/c.cpp.inc Normal file

File diff suppressed because it is too large Load Diff

338
test/d.h.inc Normal file
View File

@ -0,0 +1,338 @@
/*===- TableGen'erated file -------------------------------------*- C++ -*-===*\
|* *|
|* Op Declarations *|
|* *|
|* Automatically generated file, do not edit! *|
|* *|
\*===----------------------------------------------------------------------===*/
#if defined(GET_OP_CLASSES) || defined(GET_OP_FWD_DEFINES)
#undef GET_OP_FWD_DEFINES
class AddOp;
class ConstantOp;
class GenericCallOp;
class MulOp;
class PrintOp;
class ReshapeOp;
class ReturnOp;
class TransposeOp;
#endif
#ifdef GET_OP_CLASSES
#undef GET_OP_CLASSES
//===----------------------------------------------------------------------===//
// toy::AddOp declarations
//===----------------------------------------------------------------------===//
class AddOpAdaptor {
public:
AddOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
AddOpAdaptor(AddOp& op);
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
::mlir::Value lhs();
::mlir::Value rhs();
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
class AddOp : public ::mlir::Op<AddOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::NOperands<2>::Impl> {
public:
using Op::Op;
using Adaptor = AddOpAdaptor;
static ::llvm::StringRef getOperationName();
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
::mlir::Value lhs();
::mlir::Value rhs();
::mlir::MutableOperandRange lhsMutable();
::mlir::MutableOperandRange rhsMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
static void build(OpBuilder &b, OperationState &state, Value lhs, Value rhs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::Value lhs, ::mlir::Value rhs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::Value lhs, ::mlir::Value rhs);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(::mlir::OpAsmPrinter &p);
::mlir::LogicalResult verify();
};
//===----------------------------------------------------------------------===//
// toy::ConstantOp declarations
//===----------------------------------------------------------------------===//
class ConstantOpAdaptor {
public:
ConstantOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
ConstantOpAdaptor(ConstantOp& op);
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
::mlir::DenseElementsAttr value();
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
class ConstantOp : public ::mlir::Op<ConstantOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::ZeroOperands, ::mlir::MemoryEffectOpInterface::Trait> {
public:
using Op::Op;
using Adaptor = ConstantOpAdaptor;
static ::llvm::StringRef getOperationName();
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
::mlir::DenseElementsAttr valueAttr();
::mlir::DenseElementsAttr value();
void valueAttr(::mlir::DenseElementsAttr attr);
static void build(OpBuilder &builder, OperationState &state, DenseElementsAttr value);
static void build(OpBuilder &builder, OperationState &state, double value);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::DenseElementsAttr value);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::DenseElementsAttr value);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(::mlir::OpAsmPrinter &p);
::mlir::LogicalResult verify();
void getEffects(::mlir::SmallVectorImpl<::mlir::SideEffects::EffectInstance<MemoryEffects::Effect>> &effects);
};
//===----------------------------------------------------------------------===//
// toy::GenericCallOp declarations
//===----------------------------------------------------------------------===//
class GenericCallOpAdaptor {
public:
GenericCallOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
GenericCallOpAdaptor(GenericCallOp& op);
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
::mlir::ValueRange inputs();
::mlir::FlatSymbolRefAttr callee();
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
class GenericCallOp : public ::mlir::Op<GenericCallOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::VariadicOperands> {
public:
using Op::Op;
using Adaptor = GenericCallOpAdaptor;
static ::llvm::StringRef getOperationName();
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
::mlir::Operation::operand_range inputs();
::mlir::MutableOperandRange inputsMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
::mlir::FlatSymbolRefAttr calleeAttr();
::llvm::StringRef callee();
void calleeAttr(::mlir::FlatSymbolRefAttr attr);
static void build(OpBuilder &builder, OperationState &state, StringRef callee, ArrayRef<Value> arguments);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::FlatSymbolRefAttr callee, ::mlir::ValueRange inputs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::FlatSymbolRefAttr callee, ::mlir::ValueRange inputs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::llvm::StringRef callee, ::mlir::ValueRange inputs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::llvm::StringRef callee, ::mlir::ValueRange inputs);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
::mlir::LogicalResult verify();
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(OpAsmPrinter &p);
};
//===----------------------------------------------------------------------===//
// toy::MulOp declarations
//===----------------------------------------------------------------------===//
class MulOpAdaptor {
public:
MulOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
MulOpAdaptor(MulOp& op);
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
::mlir::Value lhs();
::mlir::Value rhs();
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
// toy::MulOp: two operands, one result, no regions/successors.
// NOTE(review): declarations generated by mlir-tblgen from Ops.td; changes here
// will be lost on regeneration — the fix belongs in the .td custom builder too.
class MulOp : public ::mlir::Op<MulOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::NOperands<2>::Impl> {
public:
using Op::Op;
using Adaptor = MulOpAdaptor;
// Canonical operation name as registered with the dialect.
static ::llvm::StringRef getOperationName();
// (start, length) of the ODS operand/result group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
// Named operand accessors.
::mlir::Value lhs();
::mlir::Value rhs();
// Mutable handles allowing in-place operand replacement.
::mlir::MutableOperandRange lhsMutable();
::mlir::MutableOperandRange rhsMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
// Custom builder (declared in Ops.td). Fully qualified for consistency with
// the generated overloads below, so the header does not depend on a
// `using namespace mlir` at the include site.
static void build(::mlir::OpBuilder &b, ::mlir::OperationState &state, ::mlir::Value lhs, ::mlir::Value rhs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::Value lhs, ::mlir::Value rhs);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::Value lhs, ::mlir::Value rhs);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
// Custom assembly format hooks and verifier.
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(::mlir::OpAsmPrinter &p);
::mlir::LogicalResult verify();
};
//===----------------------------------------------------------------------===//
// toy::PrintOp declarations
//===----------------------------------------------------------------------===//
// Adaptor for toy::PrintOp: named access to the single operand from a raw
// ValueRange/DictionaryAttr pair.
// NOTE(review): generated by mlir-tblgen — edit the .td definition, not this file.
class PrintOpAdaptor {
public:
// Wraps a raw operand range plus an optional attribute dictionary.
PrintOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
// Adapts an existing PrintOp instance.
PrintOpAdaptor(PrintOp& op);
// (start, length) of the ODS operand group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
// The value to be printed.
::mlir::Value input();
// Verifies the adapted attributes/operands; `loc` is used for diagnostics.
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
// toy::PrintOp: one operand, zero results, no regions/successors.
// NOTE(review): declarations generated by mlir-tblgen from Ops.td; changes here
// will be lost on regeneration — the fix belongs in the .td declaration too.
class PrintOp : public ::mlir::Op<PrintOp, OpTrait::ZeroRegion, OpTrait::ZeroResult, OpTrait::ZeroSuccessor, OpTrait::OneOperand> {
public:
using Op::Op;
using Adaptor = PrintOpAdaptor;
// Canonical operation name as registered with the dialect.
static ::llvm::StringRef getOperationName();
// (start, length) of the ODS operand/result group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
// The value to be printed.
::mlir::Value input();
// Mutable handle allowing in-place operand replacement.
::mlir::MutableOperandRange inputMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Value input);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::Value input);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
::mlir::LogicalResult verify();
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
// Fully qualified for consistency with the generated declarations above, so
// the header does not depend on a `using namespace mlir` at the include site.
void print(::mlir::OpAsmPrinter &p);
};
//===----------------------------------------------------------------------===//
// toy::ReshapeOp declarations
//===----------------------------------------------------------------------===//
// Adaptor for toy::ReshapeOp: named access to the single operand from a raw
// ValueRange/DictionaryAttr pair.
// NOTE(review): generated by mlir-tblgen — edit the .td definition, not this file.
class ReshapeOpAdaptor {
public:
// Wraps a raw operand range plus an optional attribute dictionary.
ReshapeOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
// Adapts an existing ReshapeOp instance.
ReshapeOpAdaptor(ReshapeOp& op);
// (start, length) of the ODS operand group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
// The value being reshaped.
::mlir::Value input();
// Verifies the adapted attributes/operands; `loc` is used for diagnostics.
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
// toy::ReshapeOp: one operand, one result, no regions/successors.
// NOTE(review): declarations generated by mlir-tblgen from Ops.td; changes here
// will be lost on regeneration — the fix belongs in the .td declaration too.
class ReshapeOp : public ::mlir::Op<ReshapeOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::OneOperand> {
public:
using Op::Op;
using Adaptor = ReshapeOpAdaptor;
// Canonical operation name as registered with the dialect.
static ::llvm::StringRef getOperationName();
// (start, length) of the ODS operand/result group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
// The value being reshaped.
::mlir::Value input();
// Mutable handle allowing in-place operand replacement.
::mlir::MutableOperandRange inputMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::Value input);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::Value input);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
::mlir::LogicalResult verify();
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
// Fully qualified for consistency with the generated declarations above, so
// the header does not depend on a `using namespace mlir` at the include site.
void print(::mlir::OpAsmPrinter &p);
};
//===----------------------------------------------------------------------===//
// toy::ReturnOp declarations
//===----------------------------------------------------------------------===//
// Adaptor for toy::ReturnOp: named access to the variadic operand list from a
// raw ValueRange/DictionaryAttr pair.
// NOTE(review): generated by mlir-tblgen — edit the .td definition, not this file.
class ReturnOpAdaptor {
public:
// Wraps a raw operand range plus an optional attribute dictionary.
ReturnOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
// Adapts an existing ReturnOp instance.
ReturnOpAdaptor(ReturnOp& op);
// (start, length) of the ODS operand group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
// The (possibly empty) list of returned values.
::mlir::ValueRange input();
// Verifies the adapted attributes/operands; `loc` is used for diagnostics.
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
// toy::ReturnOp: variadic operands, zero results; a terminator that must live
// inside a FuncOp and implements MemoryEffectOpInterface.
// NOTE(review): declarations generated by mlir-tblgen from Ops.td; changes here
// will be lost on regeneration — the fix belongs in the .td declarations too.
class ReturnOp : public ::mlir::Op<ReturnOp, OpTrait::ZeroRegion, OpTrait::ZeroResult, OpTrait::ZeroSuccessor, OpTrait::VariadicOperands, ::mlir::MemoryEffectOpInterface::Trait, OpTrait::HasParent<FuncOp>::Impl, OpTrait::IsTerminator> {
public:
using Op::Op;
using Adaptor = ReturnOpAdaptor;
// Canonical operation name as registered with the dialect.
static ::llvm::StringRef getOperationName();
// (start, length) of the ODS operand/result group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
// The (possibly empty) list of returned values.
::mlir::Operation::operand_range input();
// Mutable handle allowing in-place operand replacement.
::mlir::MutableOperandRange inputMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
// Custom no-operand builder (declared in Ops.td). Fully qualified for
// consistency with the generated overloads below, so the header does not
// depend on a `using namespace mlir` at the include site.
static void build(::mlir::OpBuilder &b, ::mlir::OperationState &state);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::ValueRange input);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
::mlir::LogicalResult verify();
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(::mlir::OpAsmPrinter &p);
// MemoryEffectOpInterface hook: reports this op's memory effects.
void getEffects(::mlir::SmallVectorImpl<::mlir::SideEffects::EffectInstance<MemoryEffects::Effect>> &effects);
// True when the return carries an operand (i.e. returns a value).
bool hasOperand() { return getNumOperands() != 0; }
};
//===----------------------------------------------------------------------===//
// toy::TransposeOp declarations
//===----------------------------------------------------------------------===//
// Adaptor for toy::TransposeOp: named access to the single operand from a raw
// ValueRange/DictionaryAttr pair.
// NOTE(review): generated by mlir-tblgen — edit the .td definition, not this file.
class TransposeOpAdaptor {
public:
// Wraps a raw operand range plus an optional attribute dictionary.
TransposeOpAdaptor(::mlir::ValueRange values, ::mlir::DictionaryAttr attrs = nullptr);
// Adapts an existing TransposeOp instance.
TransposeOpAdaptor(TransposeOp& op);
// (start, length) of the ODS operand group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::ValueRange getODSOperands(unsigned index);
// The value being transposed.
::mlir::Value input();
// Verifies the adapted attributes/operands; `loc` is used for diagnostics.
::mlir::LogicalResult verify(::mlir::Location loc);
private:
::mlir::ValueRange odsOperands;
::mlir::DictionaryAttr odsAttrs;
};
// toy::TransposeOp: one operand, one result, no regions/successors.
// NOTE(review): declarations generated by mlir-tblgen from Ops.td; changes here
// will be lost on regeneration — the fix belongs in the .td declarations too.
class TransposeOp : public ::mlir::Op<TransposeOp, OpTrait::ZeroRegion, OpTrait::OneResult, OpTrait::ZeroSuccessor, OpTrait::OneOperand> {
public:
using Op::Op;
using Adaptor = TransposeOpAdaptor;
// Canonical operation name as registered with the dialect.
static ::llvm::StringRef getOperationName();
// (start, length) of the ODS operand/result group at `index`.
std::pair<unsigned, unsigned> getODSOperandIndexAndLength(unsigned index);
::mlir::Operation::operand_range getODSOperands(unsigned index);
// The value being transposed.
::mlir::Value input();
// Mutable handle allowing in-place operand replacement.
::mlir::MutableOperandRange inputMutable();
std::pair<unsigned, unsigned> getODSResultIndexAndLength(unsigned index);
::mlir::Operation::result_range getODSResults(unsigned index);
// Custom builder (declared in Ops.td). Fully qualified for consistency with
// the generated overloads below, so the header does not depend on a
// `using namespace mlir` at the include site.
static void build(::mlir::OpBuilder &b, ::mlir::OperationState &state, ::mlir::Value input);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::mlir::Type resultType0, ::mlir::Value input);
static void build(::mlir::OpBuilder &odsBuilder, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::Value input);
static void build(::mlir::OpBuilder &, ::mlir::OperationState &odsState, ::llvm::ArrayRef<::mlir::Type> resultTypes, ::mlir::ValueRange operands, ::llvm::ArrayRef<::mlir::NamedAttribute> attributes = {});
::mlir::LogicalResult verify();
static ::mlir::ParseResult parse(::mlir::OpAsmParser &parser, ::mlir::OperationState &result);
void print(::mlir::OpAsmPrinter &p);
};
#endif // GET_OP_CLASSES

2308
yarn.lock

File diff suppressed because it is too large Load Diff