changes for mypipeline.onnx (#202)

* changes for mypipeline.onnx

* format

* rm MLOpBuildTable.inc

* copy string without free

* fix the memory issue

* restore change for STRING

* format

Co-authored-by: Tian Jin <tjingrant@gmail.com>
This commit is contained in:
chentong319 2020-07-08 16:52:56 -04:00 committed by GitHub
parent 100bfc81b4
commit 8848464a23
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 34 additions and 61 deletions

View File

@ -128,6 +128,13 @@ private:
case onnx::AttributeProto::TENSOR: case onnx::AttributeProto::TENSOR:
mlirAttr = onnxTensorProtoToDenseElmAttr(builder_, attr.t()); mlirAttr = onnxTensorProtoToDenseElmAttr(builder_, attr.t());
break; break;
case onnx::AttributeProto::STRINGS: {
llvm::SmallVector<mlir::StringRef, 4> vectorStringRef;
for (const auto &item : attr.strings()) {
vectorStringRef.push_back(llvm::StringRef(item));
}
mlirAttr = builder_.getStrArrayAttr(llvm::makeArrayRef(vectorStringRef));
} break;
default: default:
llvm_unreachable("datatype for attribute is not implemented"); llvm_unreachable("datatype for attribute is not implemented");
break; break;

View File

@ -1,42 +0,0 @@
//********************************************************
// Do not modify this file directly.
// This file is automatically generated via script.
// Details can be found in docs/readonnxdefs.md .
//********************************************************
// Dispatch table mapping an ONNX-ML (ai.onnx.ml domain) operator name to
// the builder call for its corresponding MLONNX MLIR op.
// NOTE(review): `opName` and `node` are not declared here, so this .inc
// fragment is presumably #include'd inside an importer function where both
// are in scope — confirm against the including translation unit.
// NOTE(review): the checks are independent `if`s with no else-if or early
// return, so every comparison runs even after a match; harmless as long as
// the operator names are mutually exclusive.
if (opName == "ArrayFeatureExtractor")
buildOperation&lt;mlir::MLONNXArrayFeatureExtractorOp&gt;(node);
if (opName == "Binarizer")
buildOperation&lt;mlir::MLONNXBinarizerOp&gt;(node);
if (opName == "CastMap")
buildOperation&lt;mlir::MLONNXCastMapOp&gt;(node);
if (opName == "CategoryMapper")
buildOperation&lt;mlir::MLONNXCategoryMapperOp&gt;(node);
if (opName == "DictVectorizer")
buildOperation&lt;mlir::MLONNXDictVectorizerOp&gt;(node);
if (opName == "FeatureVectorizer")
buildOperation&lt;mlir::MLONNXFeatureVectorizerOp&gt;(node);
if (opName == "Imputer")
buildOperation&lt;mlir::MLONNXImputerOp&gt;(node);
if (opName == "LabelEncoder")
buildOperation&lt;mlir::MLONNXLabelEncoderOp&gt;(node);
if (opName == "LinearClassifier")
buildOperation&lt;mlir::MLONNXLinearClassifierOp&gt;(node);
if (opName == "LinearRegressor")
buildOperation&lt;mlir::MLONNXLinearRegressorOp&gt;(node);
if (opName == "Normalizer")
buildOperation&lt;mlir::MLONNXNormalizerOp&gt;(node);
if (opName == "OneHotEncoder")
buildOperation&lt;mlir::MLONNXOneHotEncoderOp&gt;(node);
if (opName == "SVMClassifier")
buildOperation&lt;mlir::MLONNXSVMClassifierOp&gt;(node);
if (opName == "SVMRegressor")
buildOperation&lt;mlir::MLONNXSVMRegressorOp&gt;(node);
if (opName == "Scaler")
buildOperation&lt;mlir::MLONNXScalerOp&gt;(node);
if (opName == "TreeEnsembleClassifier")
buildOperation&lt;mlir::MLONNXTreeEnsembleClassifierOp&gt;(node);
if (opName == "TreeEnsembleRegressor")
buildOperation&lt;mlir::MLONNXTreeEnsembleRegressorOp&gt;(node);
if (opName == "ZipMap")
buildOperation&lt;mlir::MLONNXZipMapOp&gt;(node);

View File

@ -204,7 +204,7 @@ def ONNXArgMaxOp:ONNX_Op<"ArgMax",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {4};
} }
}]; }];
} }
@ -230,7 +230,7 @@ def ONNXArgMinOp:ONNX_Op<"ArgMin",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {4};
} }
}]; }];
} }
@ -944,7 +944,7 @@ def ONNXDequantizeLinearOp:ONNX_Op<"DequantizeLinear",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {21}; return {7};
} }
}]; }];
} }
@ -1091,7 +1091,7 @@ def ONNXDynamicQuantizeLinearOp:ONNX_Op<"DynamicQuantizeLinear",
return 3; return 3;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {1,-1,1}; return {1,7,1};
} }
}]; }];
} }
@ -2914,7 +2914,7 @@ def ONNXNonMaxSuppressionOp:ONNX_Op<"NonMaxSuppression",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {22}; return {4};
} }
}]; }];
} }
@ -2938,7 +2938,7 @@ def ONNXNonZeroOp:ONNX_Op<"NonZero",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {4};
} }
}]; }];
} }
@ -5144,7 +5144,7 @@ def ONNXStringNormalizerOp:ONNX_Op<"StringNormalizer",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {20}; return {11};
} }
}]; }];
} }
@ -5526,7 +5526,7 @@ def ONNXUniqueOp:ONNX_Op<"Unique",
return 4; return 4;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {20,-1,-1,-1}; return {20,4,4,4};
} }
}]; }];
} }
@ -5823,7 +5823,7 @@ def ONNXFeatureVectorizerOp:ONNX_Op<"FeatureVectorizer",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -5929,7 +5929,7 @@ def ONNXLinearClassifierOp:ONNX_Op<"LinearClassifier",
return 2; return 2;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1,-1}; return {-1,7};
} }
}]; }];
} }
@ -5959,7 +5959,7 @@ def ONNXLinearRegressorOp:ONNX_Op<"LinearRegressor",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -5990,7 +5990,7 @@ def ONNXNormalizerOp:ONNX_Op<"Normalizer",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -6021,7 +6021,7 @@ def ONNXOneHotEncoderOp:ONNX_Op<"OneHotEncoder",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -6054,7 +6054,7 @@ def ONNXSVMClassifierOp:ONNX_Op<"SVMClassifier",
return 2; return 2;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1,-1}; return {-1,7};
} }
}]; }];
} }
@ -6083,7 +6083,7 @@ def ONNXSVMRegressorOp:ONNX_Op<"SVMRegressor",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -6106,7 +6106,7 @@ def ONNXScalerOp:ONNX_Op<"Scaler",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }
@ -6153,7 +6153,7 @@ def ONNXTreeEnsembleClassifierOp:ONNX_Op<"TreeEnsembleClassifier",
return 2; return 2;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1,-1}; return {-1,7};
} }
}]; }];
} }
@ -6200,7 +6200,7 @@ def ONNXTreeEnsembleRegressorOp:ONNX_Op<"TreeEnsembleRegressor",
return 1; return 1;
} }
static std::vector<int> getTypeMap() { static std::vector<int> getTypeMap() {
return {-1}; return {7};
} }
}]; }];
} }

View File

@ -428,6 +428,14 @@ def get_allowed_elem_types(schema, input):
# TODO: enable type constraints. # TODO: enable type constraints.
if input.typeStr : if input.typeStr :
tstr = input.typeStr tstr = input.typeStr
structure, element = get_data_structure_element(tstr);
# In case the type is directly specified
if structure and element :
t = np_type_to_tblgen_attr_type(element)
if t == None :
return allowed_structure, None
else :
return structure, [t]
else : else :
return None return None
if schema.type_constraints: if schema.type_constraints: