add Bazel build for the ONNX dialect

colin 2020-10-10 21:05:48 +08:00
parent c440bd4dba
commit 8af23c97e9
8 changed files with 1454 additions and 10 deletions

WORKSPACE (146 lines changed)

@@ -2,10 +2,15 @@ workspace(name = "onnxmlir")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository", "new_git_repository")
load("//third_party:repo.bzl", "tf_http_archive")

# def clean_dep(dep):
#     return str(Label(dep))

LLVM_COMMIT = "85763e0758fbd238c81f233c6f9510e81c7de177"  # llvm 12
LLVM_COMMIT_xx = "91671e13efbc5dbd17b832d7973401350d0a6ee6"

LLVM_BAZEL_TAG = "llvm-project-%s" % (LLVM_COMMIT,)
LLVM_BAZEL_SHA256 = "5d358075abc2db8192c138bdaa6ce74f2c59a0bde6d7d57813f3fc66d6b6da34"
http_archive(
@@ -14,16 +19,20 @@ http_archive(
    strip_prefix = "llvm-bazel-{tag}/llvm-bazel".format(tag = LLVM_BAZEL_TAG),
    url = "https://github.com/google/llvm-bazel/archive/{tag}.tar.gz".format(tag = LLVM_BAZEL_TAG),
)

# LLVM_SHA256 = "f33108ba4bc81c6704753838ee4d85ad87e195fda3df991c2b00f18872a9e2dd"
LLVM_SHA256 = ""

LLVM_URLS = [
    # "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
    "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT_xx),
    # "https://github.com/llvm/llvm-project/archive/llvmorg-11.0.0-rc6.tar.gz"
]

http_archive(
    name = "llvm-project-raw",
    build_file_content = "#empty",
    sha256 = LLVM_SHA256,
    strip_prefix = "llvm-project-" + LLVM_COMMIT_xx,
    # strip_prefix = "llvm-project-85763e0758fbd238c81f233c6f9510e81c7de177",
    urls = LLVM_URLS,
)

load("@llvm-bazel//:configure.bzl", "llvm_configure")
@@ -33,19 +42,136 @@ llvm_configure(
    src_workspace = "@llvm-project-raw//:WORKSPACE",
)

ONNX_MLIR_SHA256 = "f33108ba4bc81c6704753838ee4d85ad87e195fda3df991c2b00f18872a9e2dd"
# http_archive(
# name = "onnx-mlir-raw",
# build_file_content = "#empty",
# sha256 = ONNX_MLIR_SHA256,
# strip_prefix = "src",
# urls = [
# "https://github.com/onnx/onnx-mlir.git",
# ],
# )
# protobuf #####################################################################
# http_archive(
# name = "protobuf_archive",
# sha256 = "1c020fafc84acd235ec81c6aac22d73f23e85a700871466052ff231d69c1b17a",
# strip_prefix = "protobuf-5902e759108d14ee8e6b0b07653dac2f4e70ac73",
# build_file = "//third_party:protobuf.BUILD",
# # system_build_file = clean_dep("//third_party/systemlibs:protobuf.BUILD"),
# # system_link_files = {
# # "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",
# # },
# urls = [
# "https://github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
# "http://mirror.tensorflow.org/github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
# ],
# )
# PROTOBUF_URLS = [
# "http://mirror.tensorflow.org/github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
# "https://github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
# ]
# PROTOBUF_SHA256 = "1c020fafc84acd235ec81c6aac22d73f23e85a700871466052ff231d69c1b17a"
# PROTOBUF_STRIP_PREFIX = "protobuf-5902e759108d14ee8e6b0b07653dac2f4e70ac73"
# tf_http_archive(
# name = "protobuf_archive",
# sha256 = PROTOBUF_SHA256,
# strip_prefix = PROTOBUF_STRIP_PREFIX,
# system_build_file = clean_dep("//third_party/systemlibs:protobuf.BUILD"),
# system_link_files = {
# "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",
# },
# urls = PROTOBUF_URLS,
# )
# # We need to import the protobuf library under the names com_google_protobuf
# # and com_google_protobuf_cc to enable proto_library support in bazel.
# # Unfortunately there is no way to alias http_archives at the moment.
# tf_http_archive(
# name = "com_google_protobuf",
# sha256 = PROTOBUF_SHA256,
# strip_prefix = PROTOBUF_STRIP_PREFIX,
# system_build_file = clean_dep("//third_party/systemlibs:protobuf.BUILD"),
# system_link_files = {
# "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",
# },
# urls = PROTOBUF_URLS,
# )
# tf_http_archive(
# name = "com_google_protobuf_cc",
# sha256 = PROTOBUF_SHA256,
# strip_prefix = PROTOBUF_STRIP_PREFIX,
# system_build_file = clean_dep("//third_party/systemlibs:protobuf.BUILD"),
# system_link_files = {
# "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",
# },
# urls = PROTOBUF_URLS,
# )
# rules_cc defines rules for generating C++ code from Protocol Buffers.
http_archive(
    name = "rules_cc",
    sha256 = "35f2fb4ea0b3e61ad64a369de284e4fbbdcdba71836a5555abb5e194cf119509",
    strip_prefix = "rules_cc-624b5d59dfb45672d4239422fa1e3de1822ee110",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/rules_cc/archive/624b5d59dfb45672d4239422fa1e3de1822ee110.tar.gz",
        "https://github.com/bazelbuild/rules_cc/archive/624b5d59dfb45672d4239422fa1e3de1822ee110.tar.gz",
    ],
)
# rules_proto defines abstract rules for building Protocol Buffers.
http_archive(
    name = "rules_proto",
    sha256 = "2490dca4f249b8a9a3ab07bd1ba6eca085aaf8e45a734af92aad0c42d9dc7aaf",
    strip_prefix = "rules_proto-218ffa7dfa5408492dc86c01ee637614f8695c45",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/rules_proto/archive/218ffa7dfa5408492dc86c01ee637614f8695c45.tar.gz",
        "https://github.com/bazelbuild/rules_proto/archive/218ffa7dfa5408492dc86c01ee637614f8695c45.tar.gz",
    ],
)
load("@rules_cc//cc:repositories.bzl", "rules_cc_dependencies")
rules_cc_dependencies()
load("@rules_proto//proto:repositories.bzl", "rules_proto_dependencies", "rules_proto_toolchains")
rules_proto_dependencies()
rules_proto_toolchains()
###############################################################################
# Skylib #######################################################################
http_archive(
    name = "bazel_skylib",
    sha256 = "97e70364e9249702246c0e9444bccdc4b847bed1eb03c5a3ece4f83dfe6abc44",
    urls = [
        "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz",
        "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.2/bazel-skylib-1.0.2.tar.gz",
    ],
)
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()
###############################################################################
load(":onnxmlir.bzl", "onnxmlir_deps") load(":onnxmlir.bzl", "onnxmlir_deps")
onnxmlir_deps()
onnxmlir_deps()
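
Note: tf_http_archive, loaded from //third_party:repo.bzl at the top of this file, is only exercised by the commented-out protobuf blocks above. A minimal sketch of how one of those blocks would look if re-enabled (values are copied from the comments; the //third_party/systemlibs labels are assumed to exist and are not part of this commit):

tf_http_archive(
    name = "protobuf_archive",
    sha256 = "1c020fafc84acd235ec81c6aac22d73f23e85a700871466052ff231d69c1b17a",
    strip_prefix = "protobuf-5902e759108d14ee8e6b0b07653dac2f4e70ac73",
    system_build_file = "//third_party/systemlibs:protobuf.BUILD",  # assumed label
    system_link_files = {
        "//third_party/systemlibs:protobuf.bzl": "protobuf.bzl",  # assumed label
    },
    urls = [
        # The mirror URL must come first; repo.bzl enforces this.
        "http://mirror.tensorflow.org/github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
        "https://github.com/protocolbuffers/protobuf/archive/5902e759108d14ee8e6b0b07653dac2f4e70ac73.tar.gz",
    ],
)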

src/Dialect/ONNX/BUILD (new file, 55 lines)

@@ -0,0 +1,55 @@
package(
    default_visibility = ["//visibility:public"],
)

load("@llvm-project//mlir:tblgen.bzl", "gentbl")

gentbl(
    name = "OMONNXOpsIncGen",
    tbl_outs = [
        ("-gen-op-decls", "ONNXOps.hpp.inc"),
        ("-gen-op-defs", "ONNXOps.cpp.inc"),
    ],
    tblgen = "@llvm-project//mlir:mlir-tblgen",
    td_file = "ONNXOps.td",
    td_includes = ["."],
    td_srcs = [
        "ONNXOps.td.inc",
        # "//src/Interface:PromotableConstOperandsOpInterface.td",
        # "//src/Interface:ResultTypeInferenceOpInterface.td",
        # "//src/Interface:ShapeInferenceInterface.td",
        "@llvm-project//mlir:TdFiles",
    ],
)
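
The same gentbl machinery can emit other outputs from ONNXOps.td; for instance (a sketch for illustration, not part of this commit), op documentation via mlir-tblgen's -gen-op-doc generator:

gentbl(
    name = "OMONNXOpsDocGen",  # hypothetical target
    tbl_outs = [
        ("-gen-op-doc", "ONNXOps.md"),
    ],
    tblgen = "@llvm-project//mlir:mlir-tblgen",
    td_file = "ONNXOps.td",
    td_includes = ["."],
    td_srcs = [
        "ONNXOps.td.inc",
        "@llvm-project//mlir:TdFiles",
    ],
)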
cc_library(
    name = "OMONNXOps",
    srcs = [
        "ONNXOps.cpp",
        # "ONNXOps.hpp",
        "ONNXOpsHelper.cpp",
        # "ONNXOpsHelper.hpp",
    ],
    hdrs = glob([
        "**/*.hpp",
        "**/*.inc",
    ]),
    deps = [
        ":OMONNXOpsIncGen",
        "@//src/Interface:OMPromotableConstOperandsOpInterface",
        "@//src/Interface:OMShapeInferenceOpInterface",
        "@//src/Interface:OMResultTypeInferenceOpInterface",
        "@onnx",
        # "@onnx//:onnx_proto_genproto",
        # "@llvm-project//mlir:Affine",
        "@llvm-project//mlir:IR",
        "@llvm-project//mlir:Shape",
        # "@llvm-project//mlir:Support",
        # "@llvm-project//mlir:TableGen",
    ],
)
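
Downstream targets consume the dialect like any other cc_library; a minimal sketch (target and source names are hypothetical):

cc_library(
    name = "onnx_dialect_user",      # hypothetical example target
    srcs = ["UseONNXDialect.cpp"],   # hypothetical source
    deps = [
        "//src/Dialect/ONNX:OMONNXOps",
        "@llvm-project//mlir:IR",
    ],
)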

src/Interface/BUILD (new file, 89 lines)

@@ -0,0 +1,89 @@
package(
    default_visibility = ["//visibility:public"],
)

load("@llvm-project//mlir:tblgen.bzl", "gentbl")

gentbl(
    name = "OMPromotableConstOperandsOpInterfaceIncGen",
    tbl_outs = [
        ("-gen-op-interface-decls", "PromotableConstOperandsOpInterface.hpp.inc"),
        ("-gen-op-interface-defs", "PromotableConstOperandsOpInterface.cpp.inc"),
    ],
    tblgen = "@llvm-project//mlir:mlir-tblgen",
    td_file = "PromotableConstOperandsOpInterface.td",
    td_includes = ["."],
    td_srcs = [
        "@llvm-project//mlir:TdFiles",
    ],
)

gentbl(
    name = "ShapeInferenceOpInterfaceIncGen",
    tbl_outs = [
        ("-gen-op-interface-decls", "ShapeInference.hpp.inc"),
        ("-gen-op-interface-defs", "ShapeInference.cpp.inc"),
    ],
    tblgen = "@llvm-project//mlir:mlir-tblgen",
    td_file = "ShapeInferenceInterface.td",
    td_includes = ["."],
    td_srcs = [
        "@llvm-project//mlir:TdFiles",
    ],
)

gentbl(
    name = "OMResultTypeInferenceOpInterfaceIncGen",
    tbl_outs = [
        ("-gen-op-interface-decls", "ResultTypeInferenceOpInterface.hpp.inc"),
        ("-gen-op-interface-defs", "ResultTypeInferenceOpInterface.cpp.inc"),
    ],
    tblgen = "@llvm-project//mlir:mlir-tblgen",
    td_file = "ResultTypeInferenceOpInterface.td",
    td_includes = ["."],
    td_srcs = [
        "@llvm-project//mlir:TdFiles",
    ],
)

cc_library(
    name = "OMPromotableConstOperandsOpInterface",
    srcs = [
        "PromotableConstOperandsOpInterface.cpp",
    ],
    hdrs = [
        "PromotableConstOperandsOpInterface.hpp",
    ],
    deps = [
        ":OMPromotableConstOperandsOpInterfaceIncGen",
        "@llvm-project//mlir:IR",
    ],
)

cc_library(
    name = "OMShapeInferenceOpInterface",
    srcs = [
        "ShapeInferenceInterface.cpp",
    ],
    hdrs = [
        "ShapeInferenceInterface.hpp",
    ],
    deps = [
        ":ShapeInferenceOpInterfaceIncGen",
        "@llvm-project//mlir:IR",
    ],
)

cc_library(
    name = "OMResultTypeInferenceOpInterface",
    srcs = [
        "ResultTypeInferenceOpInterface.cpp",
    ],
    hdrs = [
        "ResultTypeInferenceOpInterface.hpp",
    ],
    deps = [
        ":OMResultTypeInferenceOpInterfaceIncGen",
        "@llvm-project//mlir:IR",
    ],
)

third_party/build_config.bzl (new vendored file, 765 lines)

@@ -0,0 +1,765 @@
# Platform-specific build configurations.

load("@protobuf_archive//:protobuf.bzl", "proto_gen")

# Appends a suffix to a list of deps.
def tf_deps(deps, suffix):
    tf_deps = []

    # If the package name is in shorthand form (ie: does not contain a ':'),
    # expand it to the full name.
    for dep in deps:
        tf_dep = dep

        if not ":" in dep:
            dep_pieces = dep.split("/")
            tf_dep += ":" + dep_pieces[len(dep_pieces) - 1]

        tf_deps += [tf_dep + suffix]

    return tf_deps
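
For illustration, the shorthand expansion works like this (labels are made up):

# Shorthand "//tensorflow/core/lib/core" gains an explicit ":core" target
# name before the suffix is appended; "//foo:bar" is used as-is.
cc_deps = tf_deps(
    deps = ["//tensorflow/core/lib/core", "//foo:bar"],
    suffix = "_cc",
)
# => ["//tensorflow/core/lib/core:core_cc", "//foo:bar_cc"]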
# Modified from @cython//:Tools/rules.bzl
def pyx_library(
        name,
        deps = [],
        py_deps = [],
        srcs = [],
        testonly = None,
        **kwargs):
    """Compiles a group of .pyx / .pxd / .py files.

    First runs Cython to create .cpp files for each input .pyx or .py + .pxd
    pair. Then builds a shared object for each, passing "deps" to each cc_binary
    rule (includes Python headers by default). Finally, creates a py_library rule
    with the shared objects and any pure Python "srcs", with py_deps as its
    dependencies; the shared objects can be imported like normal Python files.

    Args:
        name: Name for the rule.
        deps: C/C++ dependencies of the Cython code (e.g. NumPy headers).
        py_deps: Pure Python dependencies of the final library.
        srcs: .py, .pyx, or .pxd files to either compile or pass through.
        **kwargs: Extra keyword arguments passed to the py_library.
    """

    # First filter out files that should be compiled vs. passed through.
    py_srcs = []
    pyx_srcs = []
    pxd_srcs = []
    for src in srcs:
        if src.endswith(".pyx") or (src.endswith(".py") and
                                    src[:-3] + ".pxd" in srcs):
            pyx_srcs.append(src)
        elif src.endswith(".py"):
            py_srcs.append(src)
        else:
            pxd_srcs.append(src)
        if src.endswith("__init__.py"):
            pxd_srcs.append(src)

    # Invoke cython to produce the shared object libraries.
    for filename in pyx_srcs:
        native.genrule(
            name = filename + "_cython_translation",
            srcs = [filename],
            outs = [filename.split(".")[0] + ".cpp"],
            # Optionally use PYTHON_BIN_PATH on Linux platforms so that python 3
            # works. Windows has issues with cython_binary so skip PYTHON_BIN_PATH.
            cmd = "PYTHONHASHSEED=0 $(location @cython//:cython_binary) --cplus $(SRCS) --output-file $(OUTS)",
            testonly = testonly,
            tools = ["@cython//:cython_binary"] + pxd_srcs,
        )

    shared_objects = []
    for src in pyx_srcs:
        stem = src.split(".")[0]
        shared_object_name = stem + ".so"
        native.cc_binary(
            name = shared_object_name,
            srcs = [stem + ".cpp"],
            deps = deps + ["@org_tensorflow//third_party/python_runtime:headers"],
            linkshared = 1,
            testonly = testonly,
        )
        shared_objects.append(shared_object_name)

    # Now create a py_library with these shared objects as data.
    native.py_library(
        name = name,
        srcs = py_srcs,
        deps = py_deps,
        srcs_version = "PY2AND3",
        data = shared_objects,
        testonly = testonly,
        **kwargs
    )
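
A sketch of a typical invocation (all names are illustrative): fast_ops.pyx is cythonized and built into fast_ops.so, helpers.py passes through, and both are exposed by the resulting py_library:

pyx_library(
    name = "fast_ops",       # hypothetical
    srcs = [
        "fast_ops.pyx",      # compiled by Cython, then built as a .so
        "fast_ops.pxd",      # declaration file, handed to the cython tool
        "helpers.py",        # pure Python, passed through unchanged
    ],
    deps = ["@local_config_python//:numpy_headers"],  # hypothetical C dep label
)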
def _proto_cc_hdrs(srcs, use_grpc_plugin = False):
    ret = [s[:-len(".proto")] + ".pb.h" for s in srcs]
    if use_grpc_plugin:
        ret += [s[:-len(".proto")] + ".grpc.pb.h" for s in srcs]
    return ret

def _proto_cc_srcs(srcs, use_grpc_plugin = False):
    ret = [s[:-len(".proto")] + ".pb.cc" for s in srcs]
    if use_grpc_plugin:
        ret += [s[:-len(".proto")] + ".grpc.pb.cc" for s in srcs]
    return ret

def _proto_py_outs(srcs, use_grpc_plugin = False):
    ret = [s[:-len(".proto")] + "_pb2.py" for s in srcs]
    if use_grpc_plugin:
        ret += [s[:-len(".proto")] + "_pb2_grpc.py" for s in srcs]
    return ret
# Re-defined protocol buffer rule to allow building "header only" protocol
# buffers, to avoid duplicate registrations. Also allows non-iterable cc_libs
# containing select() statements.
def cc_proto_library(
        name,
        srcs = [],
        deps = [],
        cc_libs = [],
        include = None,
        protoc = "@protobuf_archive//:protoc",
        internal_bootstrap_hack = False,
        use_grpc_plugin = False,
        use_grpc_namespace = False,
        default_header = False,
        **kargs):
    """Bazel rule to create a C++ protobuf library from proto source files.

    Args:
        name: the name of the cc_proto_library.
        srcs: the .proto files of the cc_proto_library.
        deps: a list of dependency labels; must be cc_proto_library.
        cc_libs: a list of other cc_library targets depended on by the generated
            cc_library.
        include: a string indicating the include path of the .proto files.
        protoc: the label of the protocol compiler to generate the sources.
        internal_bootstrap_hack: a flag indicating that the cc_proto_library is
            used only for bootstrapping. When it is set to True, no files will be
            generated. The rule will simply be a provider for .proto files, so
            that other cc_proto_library rules can depend on it.
        use_grpc_plugin: a flag to indicate whether to call the grpc C++ plugin
            when processing the proto files.
        default_header: Controls the naming of generated rules. If True, the `name`
            rule will be header-only, and an _impl rule will contain the
            implementation. Otherwise the header-only rule (name + "_headers_only")
            must be referred to explicitly.
        **kargs: other keyword arguments that are passed to cc_library.
    """
    includes = []
    if include != None:
        includes = [include]

    if internal_bootstrap_hack:
        # For pre-checked-in generated files, we add the internal_bootstrap_hack
        # which will skip the codegen action.
        proto_gen(
            name = name + "_genproto",
            srcs = srcs,
            includes = includes,
            protoc = protoc,
            visibility = ["//visibility:public"],
            deps = [s + "_genproto" for s in deps],
        )

        # An empty cc_library to make rule dependency consistent.
        native.cc_library(
            name = name,
            **kargs
        )
        return

    grpc_cpp_plugin = None
    plugin_options = []
    if use_grpc_plugin:
        grpc_cpp_plugin = "//external:grpc_cpp_plugin"
        if use_grpc_namespace:
            plugin_options = ["services_namespace=grpc"]

    gen_srcs = _proto_cc_srcs(srcs, use_grpc_plugin)
    gen_hdrs = _proto_cc_hdrs(srcs, use_grpc_plugin)
    outs = gen_srcs + gen_hdrs

    proto_gen(
        name = name + "_genproto",
        srcs = srcs,
        outs = outs,
        gen_cc = 1,
        includes = includes,
        plugin = grpc_cpp_plugin,
        plugin_language = "grpc",
        plugin_options = plugin_options,
        protoc = protoc,
        visibility = ["//visibility:public"],
        deps = [s + "_genproto" for s in deps],
    )
    if use_grpc_plugin:
        cc_libs += select({
            "//tensorflow:linux_s390x": ["//external:grpc_lib_unsecure"],
            "//conditions:default": ["//external:grpc_lib"],
        })

    if default_header:
        header_only_name = name
        impl_name = name + "_impl"
    else:
        header_only_name = name + "_headers_only"
        impl_name = name

    native.cc_library(
        name = impl_name,
        srcs = gen_srcs,
        hdrs = gen_hdrs,
        deps = cc_libs + deps,
        includes = includes,
        **kargs
    )
    native.cc_library(
        name = header_only_name,
        deps = ["@protobuf_archive//:protobuf_headers"],
        hdrs = gen_hdrs,
        **kargs
    )
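
With default_header = True, the main target is header-only and the implementation lands in an _impl sibling; a sketch (the proto file name is illustrative):

cc_proto_library(
    name = "example_proto_cc",   # header-only when default_header = True
    srcs = ["example.proto"],    # hypothetical
    default_header = True,
)
# The final binary then links the implementation exactly once:
# deps = [":example_proto_cc_impl"]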
# Re-defined protocol buffer rule to bring in the change introduced in commit
# https://github.com/google/protobuf/commit/294b5758c373cbab4b72f35f4cb62dc1d8332b68
# which was not part of a stable protobuf release in 04/2018.
# TODO(jsimsa): Remove this once the protobuf dependency version is updated
# to include the above commit.
def py_proto_library(
        name,
        srcs = [],
        deps = [],
        py_libs = [],
        py_extra_srcs = [],
        include = None,
        default_runtime = "@protobuf_archive//:protobuf_python",
        protoc = "@protobuf_archive//:protoc",
        use_grpc_plugin = False,
        **kargs):
    """Bazel rule to create a Python protobuf library from proto source files.

    NOTE: the rule is only an internal workaround to generate protos. The
    interface may change and the rule may be removed when bazel has introduced
    the native rule.

    Args:
        name: the name of the py_proto_library.
        srcs: the .proto files of the py_proto_library.
        deps: a list of dependency labels; must be py_proto_library.
        py_libs: a list of other py_library targets depended on by the generated
            py_library.
        py_extra_srcs: extra source files that will be added to the output
            py_library. This attribute is used for internal bootstrapping.
        include: a string indicating the include path of the .proto files.
        default_runtime: the implicitly default runtime which will be depended
            on by the generated py_library target.
        protoc: the label of the protocol compiler to generate the sources.
        use_grpc_plugin: a flag to indicate whether to call the Python C++ plugin
            when processing the proto files.
        **kargs: other keyword arguments that are passed to py_library.
    """
    outs = _proto_py_outs(srcs, use_grpc_plugin)

    includes = []
    if include != None:
        includes = [include]

    grpc_python_plugin = None
    if use_grpc_plugin:
        grpc_python_plugin = "//external:grpc_python_plugin"
        # Note: Generated grpc code depends on Python grpc module. This dependency
        # is not explicitly listed in py_libs. Instead, host system is assumed to
        # have grpc installed.

    proto_gen(
        name = name + "_genproto",
        srcs = srcs,
        outs = outs,
        gen_py = 1,
        includes = includes,
        plugin = grpc_python_plugin,
        plugin_language = "grpc",
        protoc = protoc,
        visibility = ["//visibility:public"],
        deps = [s + "_genproto" for s in deps],
    )

    if default_runtime and not default_runtime in py_libs + deps:
        py_libs = py_libs + [default_runtime]

    native.py_library(
        name = name,
        srcs = outs + py_extra_srcs,
        deps = py_libs + deps,
        imports = includes,
        **kargs
    )
def tf_proto_library_cc(
        name,
        srcs = [],
        has_services = None,
        protodeps = [],
        visibility = None,
        testonly = 0,
        cc_libs = [],
        cc_stubby_versions = None,
        cc_grpc_version = None,
        j2objc_api_version = 1,
        cc_api_version = 2,
        js_codegen = "jspb",
        default_header = False):
    js_codegen = js_codegen  # unused argument
    native.filegroup(
        name = name + "_proto_srcs",
        srcs = srcs + tf_deps(protodeps, "_proto_srcs"),
        testonly = testonly,
        visibility = visibility,
    )

    use_grpc_plugin = None
    if cc_grpc_version:
        use_grpc_plugin = True

    cc_deps = tf_deps(protodeps, "_cc")
    cc_name = name + "_cc"
    if not srcs:
        # This is a collection of sub-libraries. Build header-only and impl
        # libraries containing all the sources.
        proto_gen(
            name = cc_name + "_genproto",
            protoc = "@protobuf_archive//:protoc",
            visibility = ["//visibility:public"],
            deps = [s + "_genproto" for s in cc_deps],
        )
        native.cc_library(
            name = cc_name,
            deps = cc_deps + ["@protobuf_archive//:protobuf_headers"],
            testonly = testonly,
            visibility = visibility,
        )
        native.cc_library(
            name = cc_name + "_impl",
            deps = [s + "_impl" for s in cc_deps] + ["@protobuf_archive//:cc_wkt_protos"],
        )
        return

    cc_proto_library(
        name = cc_name,
        testonly = testonly,
        srcs = srcs,
        cc_libs = cc_libs,
        copts = [],
        default_header = default_header,
        protoc = "@protobuf_archive//:protoc",
        use_grpc_plugin = use_grpc_plugin,
        visibility = visibility,
        deps = cc_deps + ["@protobuf_archive//:cc_wkt_protos"],
    )
def tf_proto_library_py(
        name,
        srcs = [],
        protodeps = [],
        deps = [],
        visibility = None,
        testonly = 0,
        srcs_version = "PY2AND3",
        use_grpc_plugin = False):
    py_deps = tf_deps(protodeps, "_py")
    py_name = name + "_py"
    if not srcs:
        # This is a collection of sub-libraries. Build header-only and impl
        # libraries containing all the sources.
        proto_gen(
            name = py_name + "_genproto",
            protoc = "@protobuf_archive//:protoc",
            visibility = ["//visibility:public"],
            deps = [s + "_genproto" for s in py_deps],
        )
        native.py_library(
            name = py_name,
            deps = py_deps + ["@protobuf_archive//:protobuf_python"],
            testonly = testonly,
            visibility = visibility,
        )
        return

    py_proto_library(
        name = py_name,
        testonly = testonly,
        srcs = srcs,
        default_runtime = "@protobuf_archive//:protobuf_python",
        protoc = "@protobuf_archive//:protoc",
        srcs_version = srcs_version,
        use_grpc_plugin = use_grpc_plugin,
        visibility = visibility,
        deps = deps + py_deps + ["@protobuf_archive//:protobuf_python"],
    )
def tf_jspb_proto_library(**kwargs):
    pass

def tf_nano_proto_library(**kwargs):
    pass

def tf_proto_library(
        name,
        srcs = [],
        has_services = None,
        protodeps = [],
        visibility = None,
        testonly = 0,
        cc_libs = [],
        cc_api_version = 2,
        cc_grpc_version = None,
        j2objc_api_version = 1,
        js_codegen = "jspb",
        provide_cc_alias = False,
        default_header = False):
    """Make a proto library, possibly depending on other proto libraries."""
    _ignore = (js_codegen, provide_cc_alias)

    tf_proto_library_cc(
        name = name,
        testonly = testonly,
        srcs = srcs,
        cc_grpc_version = cc_grpc_version,
        cc_libs = cc_libs,
        default_header = default_header,
        protodeps = protodeps,
        visibility = visibility,
    )

    tf_proto_library_py(
        name = name,
        testonly = testonly,
        srcs = srcs,
        protodeps = protodeps,
        srcs_version = "PY2AND3",
        use_grpc_plugin = has_services,
        visibility = visibility,
    )
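
One tf_proto_library call therefore fans out to both language variants; a sketch (names illustrative):

tf_proto_library(
    name = "graph_proto",     # hypothetical
    srcs = ["graph.proto"],   # hypothetical
)
# Produces :graph_proto_cc for C++ consumers and :graph_proto_py for Python.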
# A list of all files under platform matching the pattern in 'files'. In
# contrast with 'tf_platform_srcs' below, which selectively collects files that
# must be compiled in the 'default' platform, this is a list of all headers
# mentioned in the platform/* files.
def tf_platform_hdrs(files):
    return native.glob(["platform/*/" + f for f in files])

def tf_platform_srcs(files):
    base_set = ["platform/default/" + f for f in files]
    windows_set = base_set + ["platform/windows/" + f for f in files]
    posix_set = base_set + ["platform/posix/" + f for f in files]

    # Handle cases where we must also bring the posix file in. Usually, the list
    # of files to build on windows builds is just all the stuff in the
    # windows_set. However, in some cases the implementations in 'posix/' are
    # just what is necessary and historically we choose to simply use the posix
    # file instead of making a copy in 'windows'.
    for f in files:
        if f == "error.cc":
            windows_set.append("platform/posix/" + f)

    return select({
        "//tensorflow:windows": native.glob(windows_set),
        "//conditions:default": native.glob(posix_set),
    })
def tf_additional_lib_hdrs(exclude = []):
    windows_hdrs = native.glob([
        "platform/default/*.h",
        "platform/windows/*.h",
        "platform/posix/error.h",
    ], exclude = exclude)
    return select({
        "//tensorflow:windows": windows_hdrs,
        "//conditions:default": native.glob([
            "platform/default/*.h",
            "platform/posix/*.h",
        ], exclude = exclude),
    })

def tf_additional_lib_srcs(exclude = []):
    windows_srcs = native.glob([
        "platform/default/*.cc",
        "platform/windows/*.cc",
        "platform/posix/error.cc",
    ], exclude = exclude)
    return select({
        "//tensorflow:windows": windows_srcs,
        "//conditions:default": native.glob([
            "platform/default/*.cc",
            "platform/posix/*.cc",
        ], exclude = exclude),
    })
def tf_additional_monitoring_hdrs():
    return []

def tf_additional_monitoring_srcs():
    return [
        "platform/default/monitoring.cc",
    ]

def tf_additional_minimal_lib_srcs():
    return [
        "platform/default/integral_types.h",
        "platform/default/mutex.h",
    ]

def tf_additional_proto_hdrs():
    return [
        "platform/default/integral_types.h",
        "platform/default/logging.h",
    ]

def tf_additional_proto_srcs():
    return [
        "platform/protobuf.cc",
    ]

def tf_additional_human_readable_json_deps():
    return []

def tf_additional_all_protos():
    return ["//tensorflow/core:protos_all"]

def tf_protos_all_impl():
    return [
        "//tensorflow/core:autotuning_proto_cc_impl",
        "//tensorflow/core:conv_autotuning_proto_cc_impl",
        "//tensorflow/core:protos_all_cc_impl",
    ]

def tf_profiler_all_protos():
    return ["//tensorflow/core/profiler:protos_all"]

def tf_grpc_service_all():
    return [
        "//tensorflow/core/profiler:profiler_analysis_proto_cc",
        "//tensorflow/core/profiler:profiler_service_proto_cc",
    ]

def tf_protos_grappler_impl():
    return ["//tensorflow/core/grappler/costs:op_performance_data_cc_impl"]

def tf_additional_cupti_wrapper_deps():
    return [
        "//tensorflow/stream_executor/cuda:cupti_stub",
        "@com_google_absl//absl/base",
        "@com_google_absl//absl/strings",
        "@com_google_absl//absl/strings:str_format",
        "@com_google_absl//absl/container:node_hash_map",
        "@com_google_absl//absl/container:flat_hash_map",
    ]

def tf_additional_device_tracer_srcs():
    return ["platform/default/device_tracer.cc"]

def tf_additional_device_tracer_cuda_deps():
    return []

def tf_additional_device_tracer_deps():
    return [
        "//tensorflow/core/profiler/lib:traceme",
        "//tensorflow/core/profiler/internal/cpu:host_tracer",
    ]

def tf_additional_device_tracer_test_flags():
    return []

def tf_additional_profiler_lib_deps():
    return [
        "//tensorflow/core/profiler/internal/cpu:host_tracer",
    ]

def tf_additional_libdevice_data():
    return []

def tf_additional_libdevice_deps():
    return ["@local_config_cuda//cuda:cuda_headers"]

def tf_additional_libdevice_srcs():
    return ["platform/default/cuda_libdevice_path.cc"]

def tf_additional_test_deps():
    return []

def tf_additional_test_srcs():
    return [
        "platform/default/test_benchmark.cc",
    ] + select({
        "//tensorflow:windows": [
            "platform/windows/test.cc",
        ],
        "//conditions:default": [
            "platform/posix/test.cc",
        ],
    })

def tf_kernel_tests_linkstatic():
    return 0

def tf_additional_lib_defines():
    """Additional defines needed to build TF libraries."""
    return []

def tf_additional_lib_deps():
    """Additional dependencies needed to build TF libraries."""
    return [
        "@com_google_absl//absl/base:base",
        "@com_google_absl//absl/container:inlined_vector",
        "@com_google_absl//absl/types:span",
        "@com_google_absl//absl/types:optional",
    ]
def tf_additional_core_deps():
    return select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:linux_s390x": [],
        "//tensorflow:windows": [],
        "//tensorflow:no_gcp_support": [],
        "//conditions:default": [
            "//tensorflow/core/platform/cloud:gcs_file_system",
        ],
    }) + select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:linux_s390x": [],
        "//tensorflow:windows": [],
        "//tensorflow:no_hdfs_support": [],
        "//conditions:default": [
            "//tensorflow/core/platform/hadoop:hadoop_file_system",
        ],
    }) + select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:linux_s390x": [],
        "//tensorflow:windows": [],
        "//tensorflow:no_aws_support": [],
        "//conditions:default": [
            "//tensorflow/core/platform/s3:s3_file_system",
        ],
    })

# TODO(jart, jhseu): Delete when GCP is default on.
def tf_additional_cloud_op_deps():
    return select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:linux_s390x": [],
        "//tensorflow:windows": [],
        "//tensorflow:api_version_2": [],
        "//tensorflow:windows_and_api_version_2": [],
        "//tensorflow:no_gcp_support": [],
        "//conditions:default": [
            "//tensorflow/contrib/cloud:bigquery_reader_ops_op_lib",
            "//tensorflow/contrib/cloud:gcs_config_ops_op_lib",
        ],
    })

# TODO(jhseu): Delete when GCP is default on.
def tf_additional_cloud_kernel_deps():
    return select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:linux_s390x": [],
        "//tensorflow:windows": [],
        "//tensorflow:api_version_2": [],
        "//tensorflow:windows_and_api_version_2": [],
        "//tensorflow:no_gcp_support": [],
        "//conditions:default": [
            "//tensorflow/contrib/cloud/kernels:bigquery_reader_ops",
            "//tensorflow/contrib/cloud/kernels:gcs_config_ops",
        ],
    })

def tf_lib_proto_parsing_deps():
    return [
        ":protos_all_cc",
        "//third_party/eigen3",
        "//tensorflow/core/platform/default/build_config:proto_parsing",
    ]

def tf_lib_proto_compiler_deps():
    return [
        "@protobuf_archive//:protoc_lib",
    ]

def tf_additional_verbs_lib_defines():
    return select({
        "//tensorflow:with_verbs_support": ["TENSORFLOW_USE_VERBS"],
        "//conditions:default": [],
    })

def tf_additional_mpi_lib_defines():
    return select({
        "//tensorflow:with_mpi_support": ["TENSORFLOW_USE_MPI"],
        "//conditions:default": [],
    })

def tf_additional_gdr_lib_defines():
    return select({
        "//tensorflow:with_gdr_support": ["TENSORFLOW_USE_GDR"],
        "//conditions:default": [],
    })

def tf_additional_numa_lib_defines():
    return select({
        "//tensorflow:with_numa_support": ["TENSORFLOW_USE_NUMA"],
        "//conditions:default": [],
    })
def tf_py_clif_cc(name, visibility = None, **kwargs):
    pass

def tf_pyclif_proto_library(
        name,
        proto_lib,
        proto_srcfile = "",
        visibility = None,
        **kwargs):
    pass

def tf_additional_binary_deps():
    return ["@nsync//:nsync_cpp"] + [
        # TODO(allenl): Split these out into their own shared objects (they are
        # here because they are shared between contrib/ op shared objects and
        # core).
        "//tensorflow/core/kernels:lookup_util",
        "//tensorflow/core/util/tensor_bundle",
    ]

def tf_additional_numa_deps():
    return select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:windows": [],
        "//tensorflow:macos": [],
        "//conditions:default": [
            "@hwloc",
        ],
    })

def tf_additional_numa_copts():
    return select({
        "//tensorflow:android": [],
        "//tensorflow:ios": [],
        "//tensorflow:windows": [],
        "//tensorflow:macos": [],
        "//conditions:default": [
            "-Ithird_party/hwloc/hwloc-master/include",
            "-DTENSORFLOW_USE_NUMA",
        ],
    })

third_party/onnx.BUILD (new vendored file, 81 lines)

@@ -0,0 +1,81 @@
package(default_visibility = ["//visibility:public"])

load("@rules_cc//cc:defs.bzl", "cc_library")
load("@rules_proto//proto:defs.bzl", "proto_library")

filegroup(
    name = "all",
    srcs = glob(["**"]),
)

# load("@org_tensorflow//tensorflow/core/platform:build_config.bzl", "tf_proto_library_cc")
# load("@protobuf_archive//:protobuf.bzl", "proto_gen")
# load("@//third_party:build_config.bzl", "tf_proto_library_cc")

# proto_gen(
#     name = "onnx_proto_genproto",
#     srcs = ["onnx/onnx.proto"],
#     outs = ["onnx/onnx.pb.cc", "onnx/onnx.pb.h"],
#     gen_cc = 1,
#     includes = [],
#     plugin = None,
#     plugin_language = "grpc",
#     plugin_options = None,
#     protoc = "@protobuf_archive//:protoc",
#     visibility = ["//visibility:public"],
#     # deps = [s + "_genproto" for s in deps],
# )

cc_proto_library(
    name = "onnx_proto_cc",
    deps = [":onnx_proto"],
)

proto_library(
    name = "onnx_proto",
    srcs = ["onnx/onnx.proto"],
)

cc_proto_library(
    name = "onnx_operators_proto_cc",
    deps = [":onnx_operators_proto"],
)

proto_library(
    name = "onnx_operators_proto",
    srcs = ["onnx/onnx-operators.proto"],
    deps = [":onnx_proto"],  # onnx-operators.proto imports onnx/onnx.proto
)

# tf_proto_library_cc(
#     name = "onnx_proto",
#     srcs = ["onnx/onnx-operators.proto"],
#     cc_api_version = 2,
# )
# tf_proto_library_cc(
#     name = "onnx_operators_proto",
#     srcs = ["onnx/onnx-operators.proto"],
#     cc_api_version = 2,
#     protodeps = [":onnx_proto"],
# )

cc_library(
    name = "onnx",
    srcs = glob([
        "*.c",
        "*.cc",
        "common/**/*.c",
        "common/**/*.cc",
    ]),
    hdrs = glob([
        "**/*.h",
        "**/*.hpp",
    ]),
    deps = [
        # ":onnx_proto_genproto",
        ":onnx_operators_proto_cc",
        ":onnx_proto_cc",
    ],
)
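
This BUILD file is attached to the onnx sources by onnxmlir_deps() in the WORKSPACE; a sketch of what that attachment could look like (the rule choice and the pinned commit are assumptions, not taken from this commit):

new_git_repository(
    name = "onnx",
    build_file = "//third_party:onnx.BUILD",
    commit = "...",  # placeholder; the pinned commit is not shown in this diff
    remote = "https://github.com/onnx/onnx.git",
)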

third_party/protobuf.BUILD (new vendored file, 105 lines)

@@ -0,0 +1,105 @@
load(
    "@protobuf_archive//:protobuf.bzl",
    "proto_gen",
    "py_proto_library",
    "cc_proto_library",
)

licenses(["notice"])

filegroup(
    name = "LICENSE",
    visibility = ["//visibility:public"],
)

HEADERS = [
    "google/protobuf/any.pb.h",
    "google/protobuf/any.proto",
    "google/protobuf/arena.h",
    "google/protobuf/compiler/importer.h",
    "google/protobuf/descriptor.h",
    "google/protobuf/descriptor.pb.h",
    "google/protobuf/descriptor.proto",
    "google/protobuf/duration.pb.h",
    "google/protobuf/duration.proto",
    "google/protobuf/dynamic_message.h",
    "google/protobuf/empty.pb.h",
    "google/protobuf/empty.proto",
    "google/protobuf/field_mask.pb.h",
    "google/protobuf/field_mask.proto",
    "google/protobuf/io/coded_stream.h",
    "google/protobuf/io/zero_copy_stream.h",
    "google/protobuf/io/zero_copy_stream_impl_lite.h",
    "google/protobuf/map.h",
    "google/protobuf/port_def.inc",
    "google/protobuf/repeated_field.h",
    "google/protobuf/text_format.h",
    "google/protobuf/timestamp.pb.h",
    "google/protobuf/timestamp.proto",
    "google/protobuf/util/json_util.h",
    "google/protobuf/util/type_resolver_util.h",
    "google/protobuf/wrappers.pb.h",
    "google/protobuf/wrappers.proto",
]

genrule(
    name = "link_headers",
    outs = HEADERS,
    cmd = """
      for i in $(OUTS); do
        f=$${i#$(@D)/}
        mkdir -p $(@D)/$${f%/*}
        ln -sf $(INCLUDEDIR)/$$f $(@D)/$$f
      done
    """,
)

cc_library(
    name = "protobuf",
    hdrs = HEADERS,
    linkopts = ["-lprotobuf"],
    visibility = ["//visibility:public"],
)

cc_library(
    name = "protobuf_headers",
    hdrs = HEADERS,
    linkopts = ["-lprotobuf"],
    visibility = ["//visibility:public"],
)

cc_library(
    name = "protoc_lib",
    linkopts = ["-lprotoc"],
    visibility = ["//visibility:public"],
)

genrule(
    name = "protoc",
    outs = ["protoc.bin"],
    cmd = "ln -s $$(which protoc) $@",
    executable = 1,
    visibility = ["//visibility:public"],
)

cc_proto_library(
    name = "cc_wkt_protos",
    hdrs = HEADERS,
    internal_bootstrap_hack = 1,
    protoc = ":protoc",
    visibility = ["//visibility:public"],
)

proto_gen(
    name = "protobuf_python_genproto",
    includes = ["."],
    protoc = "@protobuf_archive//:protoc",
    visibility = ["//visibility:public"],
)

py_library(
    name = "protobuf_python",
    data = [":link_headers"],
    srcs_version = "PY2AND3",
    visibility = ["//visibility:public"],
)

third_party/repo.bzl (new vendored file, 223 lines)

@@ -0,0 +1,223 @@
# Copyright 2017 The TensorFlow Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utilities for defining TensorFlow Bazel dependencies."""

_SINGLE_URL_WHITELIST = depset([
    "arm_compiler",
])
def _is_windows(ctx):
    return ctx.os.name.lower().find("windows") != -1

def _wrap_bash_cmd(ctx, cmd):
    if _is_windows(ctx):
        bazel_sh = _get_env_var(ctx, "BAZEL_SH")
        if not bazel_sh:
            fail("BAZEL_SH environment variable is not set")
        cmd = [bazel_sh, "-l", "-c", " ".join(["\"%s\"" % s for s in cmd])]
    return cmd

def _get_env_var(ctx, name):
    if name in ctx.os.environ:
        return ctx.os.environ[name]
    else:
        return None

# Checks if we should use the system lib instead of the bundled one
def _use_system_lib(ctx, name):
    syslibenv = _get_env_var(ctx, "TF_SYSTEM_LIBS")
    if syslibenv:
        for n in syslibenv.strip().split(","):
            if n.strip() == name:
                return True
    return False

# Executes the specified command with arguments and calls 'fail' if it exited
# with a non-zero code.
def _execute_and_check_ret_code(repo_ctx, cmd_and_args):
    result = repo_ctx.execute(cmd_and_args, timeout = 60)
    if result.return_code != 0:
        fail(("Non-zero return code({1}) when executing '{0}':\n" + "Stdout: {2}\n" +
              "Stderr: {3}").format(
            " ".join(cmd_and_args),
            result.return_code,
            result.stdout,
            result.stderr,
        ))

def _repos_are_siblings():
    return Label("@foo//bar").workspace_root.startswith("../")
# Apply a patch_file to the repository root directory.
# Runs 'patch -p1'.
def _apply_patch(ctx, patch_file):
    # Don't check patch on Windows, because patch is only available under bash.
    if not _is_windows(ctx) and not ctx.which("patch"):
        fail("patch command is not found, please install it")
    cmd = _wrap_bash_cmd(
        ctx,
        ["patch", "-p1", "-d", ctx.path("."), "-i", ctx.path(patch_file)],
    )
    _execute_and_check_ret_code(ctx, cmd)

def _apply_delete(ctx, paths):
    for path in paths:
        if path.startswith("/"):
            fail("refusing to rm -rf path starting with '/': " + path)
        if ".." in path:
            fail("refusing to rm -rf path containing '..': " + path)
    cmd = _wrap_bash_cmd(ctx, ["rm", "-rf"] + [ctx.path(path) for path in paths])
    _execute_and_check_ret_code(ctx, cmd)
def _tf_http_archive(ctx):
    if ("mirror.tensorflow.org" not in ctx.attr.urls[0] and
        (len(ctx.attr.urls) < 2 and
         ctx.attr.name not in _SINGLE_URL_WHITELIST.to_list())):
        fail("tf_http_archive(urls) must have redundant URLs. The " +
             "mirror.tensorflow.org URL must be present and it must come first. " +
             "Even if you don't have permission to mirror the file, please " +
             "put the correctly formatted mirror URL there anyway, because " +
             "someone will come along shortly thereafter and mirror the file.")

    use_syslib = _use_system_lib(ctx, ctx.attr.name)
    if not use_syslib:
        ctx.download_and_extract(
            ctx.attr.urls,
            "",
            ctx.attr.sha256,
            ctx.attr.type,
            ctx.attr.strip_prefix,
        )
        if ctx.attr.delete:
            _apply_delete(ctx, ctx.attr.delete)
        if ctx.attr.patch_file != None:
            _apply_patch(ctx, ctx.attr.patch_file)

    if use_syslib and ctx.attr.system_build_file != None:
        # Use BUILD.bazel to avoid conflict with third party projects with
        # BUILD or build (directory) underneath.
        ctx.template("BUILD.bazel", ctx.attr.system_build_file, {
            "%prefix%": ".." if _repos_are_siblings() else "external",
        }, False)
    elif ctx.attr.build_file != None:
        # Use BUILD.bazel to avoid conflict with third party projects with
        # BUILD or build (directory) underneath.
        ctx.template("BUILD.bazel", ctx.attr.build_file, {
            "%prefix%": ".." if _repos_are_siblings() else "external",
        }, False)

    if use_syslib:
        for internal_src, external_dest in ctx.attr.system_link_files.items():
            ctx.symlink(Label(internal_src), ctx.path(external_dest))
tf_http_archive = repository_rule(
    implementation = _tf_http_archive,
    attrs = {
        "sha256": attr.string(mandatory = True),
        "urls": attr.string_list(mandatory = True, allow_empty = False),
        "strip_prefix": attr.string(),
        "type": attr.string(),
        "delete": attr.string_list(),
        "patch_file": attr.label(),
        "build_file": attr.label(),
        "system_build_file": attr.label(),
        "system_link_files": attr.string_dict(),
    },
    environ = [
        "TF_SYSTEM_LIBS",
    ],
)
"""Downloads and creates Bazel repos for dependencies.

This is a swappable replacement for both http_archive() and
new_http_archive() that offers some additional features. It also helps
ensure best practices are followed.
"""
def _third_party_http_archive(ctx):
    if ("mirror.tensorflow.org" not in ctx.attr.urls[0] and
        (len(ctx.attr.urls) < 2 and
         ctx.attr.name not in _SINGLE_URL_WHITELIST.to_list())):
        fail("tf_http_archive(urls) must have redundant URLs. The " +
             "mirror.tensorflow.org URL must be present and it must come first. " +
             "Even if you don't have permission to mirror the file, please " +
             "put the correctly formatted mirror URL there anyway, because " +
             "someone will come along shortly thereafter and mirror the file.")

    use_syslib = _use_system_lib(ctx, ctx.attr.name)

    # Use "BUILD.bazel" to avoid conflict with third party projects that contain a
    # file or directory called "BUILD".
    buildfile_path = ctx.path("BUILD.bazel")

    if use_syslib:
        if ctx.attr.system_build_file == None:
            fail(("Bazel was configured with TF_SYSTEM_LIBS to use a system " +
                  "library for %s, but no system build file for %s was configured. " +
                  "Please add a system_build_file attribute to the repository rule " +
                  "for %s.") % (ctx.attr.name, ctx.attr.name, ctx.attr.name))
        ctx.symlink(Label(ctx.attr.system_build_file), buildfile_path)
    else:
        ctx.download_and_extract(
            ctx.attr.urls,
            "",
            ctx.attr.sha256,
            ctx.attr.type,
            ctx.attr.strip_prefix,
        )
        if ctx.attr.delete:
            _apply_delete(ctx, ctx.attr.delete)
        if ctx.attr.patch_file != None:
            _apply_patch(ctx, ctx.attr.patch_file)
        ctx.symlink(Label(ctx.attr.build_file), buildfile_path)

    link_dict = {}
    if use_syslib:
        link_dict.update(ctx.attr.system_link_files)

    for internal_src, external_dest in ctx.attr.link_files.items():
        # If syslib is in use and a link exists in both, use the system one.
        if external_dest not in link_dict.values():
            link_dict[internal_src] = external_dest

    for internal_src, external_dest in link_dict.items():
        ctx.symlink(Label(internal_src), ctx.path(external_dest))
# Downloads and creates Bazel repos for dependencies.
#
# This is an upgrade for tf_http_archive that works with go/tfbr-thirdparty.
#
# For link_files, specify each dict entry as:
#     "//path/to/source:file": "localfile"
third_party_http_archive = repository_rule(
    implementation = _third_party_http_archive,
    attrs = {
        "sha256": attr.string(mandatory = True),
        "urls": attr.string_list(mandatory = True, allow_empty = False),
        "strip_prefix": attr.string(),
        "type": attr.string(),
        "delete": attr.string_list(),
        "build_file": attr.string(mandatory = True),
        "system_build_file": attr.string(mandatory = False),
        "patch_file": attr.label(),
        "link_files": attr.string_dict(),
        "system_link_files": attr.string_dict(),
    },
    environ = [
        "TF_SYSTEM_LIBS",
    ],
)
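
A usage sketch for third_party_http_archive (every value is illustrative), with the mirror URL first as the rule requires and a link_files entry in the documented form:

third_party_http_archive(
    name = "example_dep",  # hypothetical
    build_file = "//third_party:example.BUILD",  # hypothetical BUILD file
    sha256 = "0000000000000000000000000000000000000000000000000000000000000000",  # placeholder
    strip_prefix = "example-1.0",
    urls = [
        "https://mirror.tensorflow.org/github.com/example/example/archive/v1.0.tar.gz",
        "https://github.com/example/example/archive/v1.0.tar.gz",
    ],
    link_files = {
        "//third_party:example_helper.bzl": "helper.bzl",  # hypothetical
    },
)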