Integrate LLVM at llvm/llvm-project@482283042f
Updates LLVM usage to match
[482283042f79](https://github.com/llvm/llvm-project/commit/482283042f79)

PiperOrigin-RevId: 365710568
parent 12a82dfe44
commit 9ebadc4c4d
@@ -15,9 +15,9 @@
 
 load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 
-LLVM_COMMIT = "20d5c42e0ef5d252b434bcb610b04f1cb79fe771"
+LLVM_COMMIT = "482283042f795ecc27838a3b2f76b5494991401c"
 
-LLVM_SHA256 = "d5ec1b6318510c8bc349c41edf985d087785fc6ae63274d1319a344f30eabfc6"
+LLVM_SHA256 = "350b8bd0def4bef191b512a79923c3e591e47b189d63e37abf149ac4751d2334"
 
 LLVM_BAZEL_TAG = "llvm-project-{commit}".format(commit = LLVM_COMMIT)
 
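The pinned LLVM_COMMIT and LLVM_SHA256 above are what the rest of the WORKSPACE feeds into its llvm-project fetch. As a minimal sketch only (the repository name "llvm-raw" and the archive URL layout are assumptions, not part of this diff), the download rule typically looks like:

```python
# Hedged sketch of how LLVM_COMMIT / LLVM_SHA256 are typically consumed.
# The repository name and URL below are assumptions, not taken from this diff.
http_archive(
    name = "llvm-raw",
    build_file_content = "# Intentionally empty.",
    sha256 = LLVM_SHA256,
    strip_prefix = "llvm-project-" + LLVM_COMMIT,
    urls = [
        "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(
            commit = LLVM_COMMIT,
        ),
    ],
)
```

Bumping the integrate then amounts to updating LLVM_COMMIT, recomputing LLVM_SHA256 over the new tarball, and mirroring the hash in the pin file below.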
@@ -1,2 +1,2 @@
-20d5c42e0ef5d252b434bcb610b04f1cb79fe771
+482283042f795ecc27838a3b2f76b5494991401c
@@ -80,7 +80,7 @@ class HLOClient_BroadcastBinaryElementwiseOp<
     HLOClient_Op<mnemonic,
         !listconcat(traits, [
             DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
-                ["reifyReturnTypeShapes"]>])> {
+                ["inferReturnTypeComponents", "reifyReturnTypeShapes"]>])> {
   let arguments = (ins
     HLO_Tensor:$lhs,
     HLO_Tensor:$rhs,
@@ -558,7 +558,8 @@ def HLOClient_TanOp : HLOClient_UnaryElementwiseOp<"tan",
 def HLOClient_ConstantLikeOp : HLOClient_Op<"constant_like",
     [NoSideEffect, SameOperandsAndResultShape,
      InferTypeOpInterface,
-     DeclareOpInterfaceMethods<InferShapedTypeOpInterface>,
+     DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
+         ["inferReturnTypeComponents"]>,
      NativeOpTrait<"InferTensorType">]> {
   let summary = "Constant like operator";
 
@@ -684,7 +685,9 @@ def HLOClient_BroadcastCompareOp : HLOClient_BroadcastBinaryElementwiseOp<
 
 def HLOClient_BroadcastSelectOp : HLOClient_Op<
     "broadcast_select",
-    [NoSideEffect, DeclareOpInterfaceMethods<InferShapedTypeOpInterface>]> {
+    [NoSideEffect,
+     DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
+         ["inferReturnTypeComponents"]>]> {
   string summary = "Select operator (with optional numpy-style broadcasting)";
 
   string description = [{
@@ -677,7 +677,8 @@ def HLO_TupleOp : HLO_Op<"tuple", [NoSideEffect]>, BASE_HLO_TupleOp {
 def HLO_CompareOp: HLO_Op<"compare", [NoSideEffect, SameTypeOperands,
     SameOperandsAndResultShape,
     DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
-        ["reifyReturnTypeShapes"]>]>, BASE_HLO_CompareOp {
+        ["inferReturnTypeComponents", "reifyReturnTypeShapes"]>]>,
+    BASE_HLO_CompareOp {
   let arguments = (ins
     HLO_Tensor:$lhs,
     HLO_Tensor:$rhs,
@@ -827,7 +828,7 @@ def HLO_BroadcastInDimOp : HLO_Op<"broadcast_in_dim",
 
 def HLO_DynamicBroadcastInDimOp : HLO_Op<"dynamic_broadcast_in_dim", [
     NoSideEffect, DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
-        ["reifyReturnTypeShapes"]>]> {
+        ["inferReturnTypeComponents", "reifyReturnTypeShapes"]>]> {
   string summary = "Broadcast a tensor into the given dynamic shape by adding dimensions.";
   string description = [{
     This is a generalization of the BroadcastInDimOp which accepts its output
@@ -1114,7 +1115,8 @@ def HLO_ScatterOp: HLO_Op<"scatter", [RecursiveSideEffects]>,
 // TODO(jpienaar): Add broadcastable trait.
 def HLO_SelectOp: HLO_Op<"select", [NoSideEffect,
     DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
-        ["reifyReturnTypeShapes"]>, DeclareOpInterfaceMethods<InferTypeOpInterface>,
+        ["inferReturnTypeComponents", "reifyReturnTypeShapes"]>,
+    DeclareOpInterfaceMethods<InferTypeOpInterface>,
   ]>, BASE_HLO_SelectOp {
   let arguments = (ins
     HLO_PredTensor:$pred,
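Each DeclareOpInterfaceMethods<InferShapedTypeOpInterface, ...> change above adds "inferReturnTypeComponents" to the declared methods, so the corresponding C++ op classes now have to define a hook with that name. Below is a minimal, hypothetical sketch of such a hook for a shape-preserving elementwise op: the op name is invented, the body is not taken from this commit, and the parameter list follows the InferShapedTypeOpInterface of roughly this LLVM revision (later revisions replace ValueRange with ValueShapeRange), so the ODS-generated declaration is the authority.

```cpp
// Hypothetical sketch, not the MHLO implementation from this commit.
// Assumes "using namespace mlir;" and the usual MLIR includes
// (mlir/Interfaces/InferTypeOpInterface.h among them).
LogicalResult MyElementwiseOp::inferReturnTypeComponents(
    MLIRContext *context, Optional<Location> location, ValueRange operands,
    DictionaryAttr attributes, RegionRange regions,
    SmallVectorImpl<ShapedTypeComponents> &inferredReturnShapes) {
  // For a shape-preserving op, the result mirrors the first operand: same
  // shape (when ranked) and same element type.
  auto operandTy = operands.front().getType().dyn_cast<ShapedType>();
  if (!operandTy)
    return failure();
  if (!operandTy.hasRank()) {
    // Unranked operand: report only the element type.
    inferredReturnShapes.emplace_back(operandTy.getElementType());
    return success();
  }
  SmallVector<int64_t, 4> shape(operandTy.getShape().begin(),
                                operandTy.getShape().end());
  inferredReturnShapes.emplace_back(std::move(shape),
                                    operandTy.getElementType());
  return success();
}
```

Declaring the method in ODS only generates the declaration; without a definition like the one sketched above, the build fails at link time, which is why the test updates below accompany the .td changes.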
@@ -4,9 +4,8 @@
 // CHECK-LABEL: @shape_of_unary
 // CHECK-SAME: (%[[ARG:.*]]: tensor<?x32xi16>)
 func @shape_of_unary(%arg : tensor<?x32xi16>) {
-  // CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG]] : tensor<?x32xi16> -> tensor<2xindex>
-  // CHECK: %[[CASTED:.*]] = tensor.cast %[[SHAPE]] : tensor<2xindex> to tensor<?xindex>
-  // CHECK: "use"(%[[CASTED]])
+  // CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG]] : tensor<?x32xi16> -> tensor<?xindex>
+  // CHECK: "use"(%[[SHAPE]])
   %0 = "mhlo.convert"(%arg) : (tensor<?x32xi16>) -> tensor<?x32xf16>
   %1 = shape.shape_of %0 : tensor<?x32xf16> -> tensor<?xindex>
   "use"(%1) : (tensor<?xindex>) -> ()
@@ -19,9 +18,8 @@ func @shape_of_unary(%arg : tensor<?x32xi16>) {
 // CHECK-LABEL: @shape_of_nary
 // CHECK-SAME: (%[[ARG0:.*]]: tensor<?x32xf16>, %[[ARG1:.*]]: tensor<?x32xf16>)
 func @shape_of_nary(%arg0 : tensor<?x32xf16>, %arg1 : tensor<?x32xf16>) {
-  // CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG0]] : tensor<?x32xf16> -> tensor<2xindex>
-  // CHECK: %[[CASTED:.*]] = tensor.cast %[[SHAPE]] : tensor<2xindex> to tensor<?xindex>
-  // CHECK: "use"(%[[CASTED]])
+  // CHECK: %[[SHAPE:.*]] = shape.shape_of %[[ARG0]] : tensor<?x32xf16> -> tensor<?xindex>
+  // CHECK: "use"(%[[SHAPE]])
   %0 = mhlo.subtract %arg0, %arg1 : tensor<?x32xf16>
   %1 = mhlo.subtract %0, %arg1 : tensor<?x32xf16>
   %2 = shape.shape_of %1 : tensor<?x32xf16> -> tensor<?xindex>