Updates LLVM usage to match
[cb65419b1ac0](https://github.com/llvm/llvm-project/commit/cb65419b1ac0)

PiperOrigin-RevId: 375915516
This commit is contained in:
Benjamin Kramer 2021-05-26 04:46:41 -07:00 committed by TensorFlow MLIR Team
parent 4b3e9373e6
commit edf5ec8084
5 changed files with 11 additions and 15 deletions

View File

@@ -15,9 +15,9 @@
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
LLVM_COMMIT = "bc56620b8b765b5720dbc1849511fb783e05b8cc" LLVM_COMMIT = "cb65419b1ac05c3020dd05b64db183712235d2ff"
LLVM_SHA256 = "dc996e2f96576274ddb089162c14afee173ff43cc6b1c8f76206a30f379087f0" LLVM_SHA256 = "95fcd4af4b21b6cb2ceda9e45d22c6b0014585258cc21727b5c864f7e9804fab"
LLVM_BAZEL_TAG = "llvm-project-{commit}".format(commit = LLVM_COMMIT) LLVM_BAZEL_TAG = "llvm-project-{commit}".format(commit = LLVM_COMMIT)

View File

@@ -1,2 +1,2 @@
bc56620b8b765b5720dbc1849511fb783e05b8cc cb65419b1ac05c3020dd05b64db183712235d2ff

View File

@@ -1669,9 +1669,7 @@ func @scatter_negative_index() -> tensor<3x3xi32> {
unique_indices = false unique_indices = false
} : (tensor<3x3xi32>, tensor<2xi32>, tensor<2x3xi32>) -> tensor<3x3xi32> } : (tensor<3x3xi32>, tensor<2xi32>, tensor<2x3xi32>) -> tensor<3x3xi32>
return %3 : tensor<3x3xi32> return %3 : tensor<3x3xi32>
// CHECK: constant dense<[ // CHECK: constant dense<{{\[}}[1, 2, 3], [4, 5, 6], [7, 8, 9]{{\]}}> : tensor<3x3xi32>
// CHECK-SAME: [1, 2, 3], [4, 5, 6], [7, 8, 9]
// CHECK-SAME: ]> : tensor<3x3xi32>
// CHECK: "mhlo.scatter" // CHECK: "mhlo.scatter"
} }
@@ -1693,9 +1691,7 @@ func @scatter_out_of_bound() -> tensor<3x3xi32> {
unique_indices = false unique_indices = false
} : (tensor<3x3xi32>, tensor<2xi32>, tensor<2x3xi32>) -> tensor<3x3xi32> } : (tensor<3x3xi32>, tensor<2xi32>, tensor<2x3xi32>) -> tensor<3x3xi32>
return %3 : tensor<3x3xi32> return %3 : tensor<3x3xi32>
// CHECK: constant dense<[ // CHECK: constant dense<{{\[}}[1, 2, 3], [4, 5, 6], [7, 8, 9]{{\]}}> : tensor<3x3xi32>
// CHECK-SAME: [1, 2, 3], [4, 5, 6], [7, 8, 9]
// CHECK-SAME: ]> : tensor<3x3xi32>
// CHECK: "mhlo.scatter" // CHECK: "mhlo.scatter"
} }

View File

@@ -129,8 +129,8 @@ func @selectv2_broadcast_all(%arg0: tensor<8x1x1xi1>, %arg1: tensor<1x8x1xi32>,
// CHECK-LABEL: func @selectv2_dynamic_ranked // CHECK-LABEL: func @selectv2_dynamic_ranked
func @selectv2_dynamic_ranked(%arg0: tensor<1xi1>, %arg1: tensor<2x?x8xi32>, %arg2: tensor<2x8x8xi32>) -> tensor<2x?x8xi32> { func @selectv2_dynamic_ranked(%arg0: tensor<1xi1>, %arg1: tensor<2x?x8xi32>, %arg2: tensor<2x8x8xi32>) -> tensor<2x?x8xi32> {
// CHECK-NEXT: %[[SHAPE0:.*]] = shape.const_shape [1] : tensor<1xindex> // CHECK-DAG: %[[SHAPE0:.*]] = shape.const_shape [1] : tensor<1xindex>
// CHECK-NEXT: %[[SHAPE2:.*]] = shape.const_shape [2, 8, 8] : tensor<3xindex> // CHECK-DAG: %[[SHAPE2:.*]] = shape.const_shape [2, 8, 8] : tensor<3xindex>
// CHECK-NEXT: %[[SHAPE1:.*]] = shape.shape_of %arg1 : tensor<2x?x8xi32> -> tensor<3xindex> // CHECK-NEXT: %[[SHAPE1:.*]] = shape.shape_of %arg1 : tensor<2x?x8xi32> -> tensor<3xindex>
// CHECK-NEXT: %[[CSTR:.*]] = shape.cstr_broadcastable %[[SHAPE1]], %[[SHAPE0]], %[[SHAPE2]] : tensor<3xindex>, tensor<1xindex>, tensor<3xindex> // CHECK-NEXT: %[[CSTR:.*]] = shape.cstr_broadcastable %[[SHAPE1]], %[[SHAPE0]], %[[SHAPE2]] : tensor<3xindex>, tensor<1xindex>, tensor<3xindex>
// CHECK-NEXT: %[[ASSUME:.*]] = shape.assuming %[[CSTR]] -> (tensor<2x?x8xi32>) { // CHECK-NEXT: %[[ASSUME:.*]] = shape.assuming %[[CSTR]] -> (tensor<2x?x8xi32>) {

View File

@@ -35,8 +35,8 @@ func @and_fold() -> (tensor<i1>, tensor<i1>) {
}) {dimensions = dense<0> : tensor<1xi64>} : (tensor<2xi1>, tensor<i1>) -> tensor<i1> }) {dimensions = dense<0> : tensor<1xi64>} : (tensor<2xi1>, tensor<i1>) -> tensor<i1>
return %4, %5 : tensor<i1>, tensor<i1> return %4, %5 : tensor<i1>, tensor<i1>
// CHECK: %[[CST:.*]] = mhlo.constant dense<true> : tensor<i1> // CHECK-DAG: %[[CST:.*]] = mhlo.constant dense<true> : tensor<i1>
// CHECK: %[[CST1:.*]] = mhlo.constant dense<false> : tensor<i1> // CHECK-DAG: %[[CST1:.*]] = mhlo.constant dense<false> : tensor<i1>
// CHECK: return %[[CST]], %[[CST1]] : tensor<i1>, tensor<i1> // CHECK: return %[[CST]], %[[CST1]] : tensor<i1>, tensor<i1>
} }
@@ -60,7 +60,7 @@ func @or_fold() -> (tensor<i1>, tensor<i1>) {
}) {dimensions = dense<0> : tensor<1xi64>} : (tensor<2xi1>, tensor<i1>) -> tensor<i1> }) {dimensions = dense<0> : tensor<1xi64>} : (tensor<2xi1>, tensor<i1>) -> tensor<i1>
return %4, %5 : tensor<i1>, tensor<i1> return %4, %5 : tensor<i1>, tensor<i1>
// CHECK: %[[CST:.*]] = mhlo.constant dense<false> : tensor<i1> // CHECK-DAG: %[[CST:.*]] = mhlo.constant dense<false> : tensor<i1>
// CHECK: %[[CST1:.*]] = mhlo.constant dense<true> : tensor<i1> // CHECK-DAG: %[[CST1:.*]] = mhlo.constant dense<true> : tensor<i1>
// CHECK: return %[[CST]], %[[CST1]] : tensor<i1>, tensor<i1> // CHECK: return %[[CST]], %[[CST1]] : tensor<i1>, tensor<i1>
} }