From 4cdc0873cac4988da54fdb3fcb8c939524fabc23 Mon Sep 17 00:00:00 2001
From: Tian Jin
Date: Tue, 19 May 2020 10:15:48 +0800
Subject: [PATCH] Call llc, ld from within onnx-mlir. (#127)

* Call llc, ld from within onnx-mlir.

* Rename EmitLLVMBC -> EmitLib, reorder header files.

* Edit comment.

Co-authored-by: Gheorghe-Teodor Bercea
---
 src/CMakeLists.txt      | 12 ++++++++++++
 src/ExternalUtil.hpp.in |  9 +++++++++
 src/MainUtils.cpp       | 40 +++++++++++++++++++++++++++++++---------
 src/MainUtils.hpp       |  6 +++---
 src/main.cpp            |  7 ++++---
 test/backend/test.py    |  9 ---------
 6 files changed, 59 insertions(+), 24 deletions(-)
 create mode 100644 src/ExternalUtil.hpp.in

diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index aa0a02f..2f85112 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -11,6 +11,18 @@ add_executable(onnx-mlir
   MainUtils.cpp
   main.cpp)
 
+# Locate llc, which is needed for translating LLVM bitcode
+# to an object file.
+if(NOT EXISTS "${LLVM_PROJ_BUILD}/bin/llc")
+  message(FATAL_ERROR "Cannot find llc.")
+endif()
+
+# Get the compiler command name; the C++ compiler is needed to translate
+# object files to shared libraries.
+get_filename_component(CXX_COMPILER_FILENAME ${CMAKE_CXX_COMPILER} NAME)
+configure_file(${CMAKE_CURRENT_SOURCE_DIR}/ExternalUtil.hpp.in
+               ${CMAKE_CURRENT_BINARY_DIR}/ExternalUtil.hpp)
+
 set(ONNX_MLIR_LD_PRELOAD_onnx-mlir "" CACHE STRING "" FORCE)
 whole_archive_link_mlir(onnx-mlir ${MLIRWholeArchiveLibs})
 if(BUILD_SHARED_LIBS)
diff --git a/src/ExternalUtil.hpp.in b/src/ExternalUtil.hpp.in
new file mode 100644
index 0000000..8defdef
--- /dev/null
+++ b/src/ExternalUtil.hpp.in
@@ -0,0 +1,9 @@
+#pragma once
+#include <string>
+
+namespace onnx_mlir {
+const std::string kLlcPath = "@LLVM_PROJ_BUILD@/bin/llc";
+const std::string kCxxPath = "@CMAKE_CXX_COMPILER@";
+const std::string kCxxFileName = "@CXX_COMPILER_FILENAME@";
+const std::string kRuntimeDirPath = "@CMAKE_BINARY_DIR@/lib";
+}
diff --git a/src/MainUtils.cpp b/src/MainUtils.cpp
index 1c7d164..70b99cc 100644
--- a/src/MainUtils.cpp
+++ b/src/MainUtils.cpp
@@ -8,9 +8,13 @@
 //
 //===----------------------------------------------------------------------===//
 
-#include "src/MainUtils.hpp"
+#include
 #include
-#include
+
+#include "llvm/Support/Program.h"
+
+#include "src/ExternalUtil.hpp"
+#include "src/MainUtils.hpp"
 
 #ifdef _WIN32
 #include
@@ -43,14 +47,33 @@ void LoadMLIR(string inputFilename, mlir::MLIRContext &context,
   }
 }
 
-void EmitLLVMBitCode(
-    const mlir::OwningModuleRef &module, string outputFilename) {
+void compileModuleToSharedLibrary(
+    const mlir::OwningModuleRef &module, string outputBaseName) {
+  // Write LLVM bitcode.
+  string outputFilename = outputBaseName + ".bc";
   error_code error;
   llvm::raw_fd_ostream moduleBitcodeStream(
       outputFilename, error, llvm::sys::fs::F_None);
   llvm::WriteBitcodeToFile(
       *mlir::translateModuleToLLVMIR(*module), moduleBitcodeStream);
   moduleBitcodeStream.flush();
+
+  // Compile bitcode to object file.
+  std::vector<std::string> llcArgs = {
+      "llc", "-filetype=obj", "-relocation-model=pic", outputFilename};
+  auto llcArgStrRefs =
+      std::vector<llvm::StringRef>(llcArgs.begin(), llcArgs.end());
+  llvm::sys::ExecuteAndWait(kLlcPath, llvm::makeArrayRef(llcArgStrRefs));
+
+  // Link with runtime.
+  // TODO(tjingrant): link with runtime library in LLVM, and make the shared
+  // library more self-contained.
+  std::vector<std::string> cxxArgs = {kCxxFileName, "-shared", "-fPIC",
+      outputBaseName + ".o", "-o", outputBaseName + ".so",
+      "-L" + kRuntimeDirPath, "-lcruntime", "-Wl,-rpath," + kRuntimeDirPath};
+  auto argsArrayRefVector =
+      std::vector<llvm::StringRef>(cxxArgs.begin(), cxxArgs.end());
+  llvm::sys::ExecuteAndWait(kCxxPath, llvm::makeArrayRef(argsArrayRefVector));
 }
 
 void registerDialects() {
@@ -149,11 +172,10 @@ void emitOutputFiles(string outputBaseName, EmissionTargetType emissionTarget,
   // outside the function code at the beginning of the file in which case the
   // elision of these constants is not strictly required. Elision is also not
   // necessary when emitting the .bc file.
-  if (emissionTarget == EmitLLVMBC) {
-    // Write LLVM bitcode to disk.
-    string outputFilename = outputBaseName + ".bc";
-    EmitLLVMBitCode(module, outputFilename);
-    printf("LLVM bitcode written to %s\n", outputFilename.c_str());
+  if (emissionTarget == EmitLib) {
+    // Write LLVM bitcode to disk, compile & link.
+    compileModuleToSharedLibrary(module, outputBaseName);
+    printf("Shared library %s.so has been compiled.\n", outputBaseName.c_str());
   } else {
     // Emit the version with all constants included.
     outputCode(module, outputBaseName, ".onnx.mlir");
diff --git a/src/MainUtils.hpp b/src/MainUtils.hpp
index 6eb8802..9cc937f 100644
--- a/src/MainUtils.hpp
+++ b/src/MainUtils.hpp
@@ -42,14 +42,14 @@ enum EmissionTargetType {
   EmitONNXIR,
   EmitMLIR,
   EmitLLVMIR,
-  EmitLLVMBC,
+  EmitLib,
 };
 
 void LoadMLIR(std::string inputFilename, mlir::MLIRContext &context,
     mlir::OwningModuleRef &module);
 
-void EmitLLVMBitCode(
-    const mlir::OwningModuleRef &module, std::string outputFilename);
+void compileModuleToSharedLibrary(
+    const mlir::OwningModuleRef &module, std::string outputBaseName);
 
 void registerDialects();
 
diff --git a/src/main.cpp b/src/main.cpp
index 80faeda..ebde6ed 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -31,9 +31,10 @@ int main(int argc, char *argv[]) {
         clEnumVal(
             EmitMLIR, "Lower model to MLIR built-in transformation dialect."),
         clEnumVal(EmitLLVMIR, "Lower model to LLVM IR (LLVM dialect)."),
-        clEnumVal(EmitLLVMBC, "Lower model to LLVM IR and emit (to file) "
-                              "LLVM bitcode for model.")),
-      llvm::cl::init(EmitLLVMBC), llvm::cl::cat(OnnxMlirOptions));
+        clEnumVal(EmitLib, "Lower model to LLVM IR, emit (to file) "
+                           "LLVM bitcode for model, and compile and link it "
+                           "into a shared library.")),
+      llvm::cl::init(EmitLib), llvm::cl::cat(OnnxMlirOptions));
 
   llvm::cl::HideUnrelatedOptions(OnnxMlirOptions);
   llvm::cl::ParseCommandLineOptions(
diff --git a/test/backend/test.py b/test/backend/test.py
index 8722351..e6d352d 100644
--- a/test/backend/test.py
+++ b/test/backend/test.py
@@ -40,14 +40,6 @@ class DummyBackend(onnx.backend.base.Backend):
         onnx.save(model, "temp_model.onnx")
         # Call frontend to process temp_model.onnx, bit code will be generated.
         execute_commands([ONNX_MLIR, "temp_model.onnx"])
-        # Call llc to generate object file from bitcode.
-        execute_commands(
-            [LLC, "-filetype=obj", "-relocation-model=pic", "temp_model.bc"])
-        # Generate shared library from object file, linking with c runtime.
-        execute_commands([
-            CXX, "-shared", "-fPIC", "temp_model.o", "-o", "temp_model.so",
-            "-L" + RUNTIME_DIR, "-lcruntime", "-Wl,-rpath=" + RUNTIME_DIR,
-        ])
         return ExecutionSession("./temp_model.so", "_dyn_entry_point_main_graph")
 
     @classmethod
@@ -62,7 +54,6 @@ backend_test = onnx.backend.test.BackendTest(DummyBackend, __name__)
 
 # Test directories:
 # https://github.com/onnx/onnx/tree/master/onnx/backend/test/data/node
-
 test_to_enable = [
     # Abs Op:
     "test_abs_cpu",
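
Note on the resulting workflow: with EmitLib as the default target, a single onnx-mlir invocation is expected to leave the .bc, .o, and .so files next to the input model, and a caller only needs to load the shared library, which is exactly what the simplified backend test above now does. The Python sketch below walks through that flow by hand. It is not part of the patch: it assumes the onnx-mlir binary is on PATH, that the PyRuntime ExecutionSession binding used by test/backend/test.py is importable, and the model path, input shape, and run() call are illustrative placeholders.

# Hand-driven version of the flow exercised by test/backend/test.py.
# Assumptions (not from the patch): onnx-mlir is on PATH, the PyRuntime
# ExecutionSession binding is importable, and the input shape is illustrative.
import subprocess

import numpy as np
from PyRuntime import ExecutionSession

# EmitLib is the default target, so this should produce model.bc, model.o,
# and model.so alongside the input model.
subprocess.run(["onnx-mlir", "model.onnx"], check=True)

# Load the compiled shared library and call the model's entry point.
session = ExecutionSession("./model.so", "_dyn_entry_point_main_graph")
outputs = session.run([np.ones((1, 3, 224, 224), dtype=np.float32)])
print([out.shape for out in outputs])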