Updates LLVM usage to match
[482283042f79](https://github.com/llvm/llvm-project/commit/482283042f79)

PiperOrigin-RevId: 365710568
This commit is contained in:
A. Unique TensorFlower 2021-03-29 18:28:52 -07:00 committed by TensorFlow MLIR Team
parent 12a82dfe44
commit 9ebadc4c4d
5 changed files with 18 additions and 15 deletions

View File

@@ -15,9 +15,9 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
LLVM_COMMIT = "20d5c42e0ef5d252b434bcb610b04f1cb79fe771"
LLVM_COMMIT = "482283042f795ecc27838a3b2f76b5494991401c"
LLVM_SHA256 = "d5ec1b6318510c8bc349c41edf985d087785fc6ae63274d1319a344f30eabfc6"
LLVM_SHA256 = "350b8bd0def4bef191b512a79923c3e591e47b189d63e37abf149ac4751d2334"
LLVM_BAZEL_TAG = "llvm-project-{commit}".format(commit = LLVM_COMMIT)

View File

@@ -1,2 +1,2 @@
20d5c42e0ef5d252b434bcb610b04f1cb79fe771
482283042f795ecc27838a3b2f76b5494991401c

View File

@@ -80,7 +80,7 @@ class HLOClient_BroadcastBinaryElementwiseOp<
HLOClient_Op<mnemonic,
!listconcat(traits, [
DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["reifyReturnTypeShapes"]>])> {
["inferReturnTypeComponents", "reifyReturnTypeShapes"]>])> {
let arguments = (ins
HLO_Tensor:$lhs,
HLO_Tensor:$rhs,
@@ -558,7 +558,8 @@ def HLOClient_TanOp : HLOClient_UnaryElementwiseOp<"tan",
def HLOClient_ConstantLikeOp : HLOClient_Op<"constant_like",
[NoSideEffect, SameOperandsAndResultShape,
InferTypeOpInterface,
DeclareOpInterfaceMethods<InferShapedTypeOpInterface>,
DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["inferReturnTypeComponents"]>,
NativeOpTrait<"InferTensorType">]> {
let summary = "Constant like operator";
@@ -684,7 +685,9 @@ def HLOClient_BroadcastCompareOp : HLOClient_BroadcastBinaryElementwiseOp<
def HLOClient_BroadcastSelectOp : HLOClient_Op<
"broadcast_select",
[NoSideEffect, DeclareOpInterfaceMethods<InferShapedTypeOpInterface>]> {
[NoSideEffect,
DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["inferReturnTypeComponents"]>]> {
string summary = "Select operator (with optional numpy-style broadcasting)";
string description = [{

View File

@@ -677,7 +677,8 @@ def HLO_TupleOp : HLO_Op<"tuple", [NoSideEffect]>, BASE_HLO_TupleOp {
def HLO_CompareOp: HLO_Op<"compare", [NoSideEffect, SameTypeOperands,
SameOperandsAndResultShape,
DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["reifyReturnTypeShapes"]>]>, BASE_HLO_CompareOp {
["inferReturnTypeComponents", "reifyReturnTypeShapes"]>]>,
BASE_HLO_CompareOp {
let arguments = (ins
HLO_Tensor:$lhs,
HLO_Tensor:$rhs,
@@ -827,7 +828,7 @@ def HLO_BroadcastInDimOp : HLO_Op<"broadcast_in_dim",
def HLO_DynamicBroadcastInDimOp : HLO_Op<"dynamic_broadcast_in_dim", [
NoSideEffect, DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["reifyReturnTypeShapes"]>]> {
["inferReturnTypeComponents", "reifyReturnTypeShapes"]>]> {
string summary = "Broadcast a tensor into the given dynamic shape by adding dimensions.";
string description = [{
This is a generalization of the BroadcastInDimOp which accepts its output
@@ -1114,7 +1115,8 @@ def HLO_ScatterOp: HLO_Op<"scatter", [RecursiveSideEffects]>,
// TODO(jpienaar): Add broadcastable trait.
def HLO_SelectOp: HLO_Op<"select", [NoSideEffect,
DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
["reifyReturnTypeShapes"]>, DeclareOpInterfaceMethods<InferTypeOpInterface>,
["inferReturnTypeComponents", "reifyReturnTypeShapes"]>,
DeclareOpInterfaceMethods<InferTypeOpInterface>,
]>, BASE_HLO_SelectOp {
let arguments = (ins
HLO_PredTensor:$pred,

View File

@@ -4,9 +4,8 @@
// CHECK-LABEL: @shape_of_unary
// CHECK-SAME: (%[[ARG:.*]]: tensor<?x32xi16>)
func @shape_of_unary(%arg : tensor<?x32xi16>) {
// CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG]] : tensor<?x32xi16> -> tensor<2xindex>
// CHECK: %[[CASTED:.*]] = tensor.cast %[[SHAPE]] : tensor<2xindex> to tensor<?xindex>
// CHECK: "use"(%[[CASTED]])
// CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG]] : tensor<?x32xi16> -> tensor<?xindex>
// CHECK: "use"(%[[SHAPE]])
%0 = "mhlo.convert"(%arg) : (tensor<?x32xi16>) -> tensor<?x32xf16>
%1 = shape.shape_of %0 : tensor<?x32xf16> -> tensor<?xindex>
"use"(%1) : (tensor<?xindex>) -> ()
@@ -19,9 +18,8 @@ func @shape_of_unary(%arg : tensor<?x32xi16>) {
// CHECK-LABEL: @shape_of_nary
// CHECK-SAME: (%[[ARG0:.*]]: tensor<?x32xf16>, %[[ARG1:.*]]: tensor<?x32xf16>)
func @shape_of_nary(%arg0 : tensor<?x32xf16>, %arg1 : tensor<?x32xf16>) {
// CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG0]] : tensor<?x32xf16> -> tensor<2xindex>
// CHECK: %[[CASTED:.*]] = tensor.cast %[[SHAPE]] : tensor<2xindex> to tensor<?xindex>
// CHECK: "use"(%[[CASTED]])
// CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG0]] : tensor<?x32xf16> -> tensor<?xindex>
// CHECK: "use"(%[[SHAPE]])
%0 = mhlo.subtract %arg0, %arg1 : tensor<?x32xf16>
%1 = mhlo.subtract %0, %arg1 : tensor<?x32xf16>
%2 = shape.shape_of %1 : tensor<?x32xf16> -> tensor<?xindex>