Build and packaging script improvements for release (#218)
* Integrate OpenCV
* Small fixes
* Add the OpenCV includes and libs
* Refine a little bit
* Standardize the output folder
* Fix ctest on Linux
* Fix setup.py for the output folder change
* Several rounds of CI pipeline fixes
* Fix the file copy issue
* Re-test the full path in build_ext
* Extend the timeout
* Fix mshost.yml indentation
* Update mshost.yaml for Azure Pipelines
* cibuildwheel: select the Python versions to build
* Update wheels.yml (only build Python 3.8/3.9)
* Update wheels.yml for Azure Pipelines
* Separate the CI pipelines
Parent: 972552126f
Commit: 909acb7ce4
@@ -42,7 +42,7 @@ jobs:
    displayName: Unpack ONNXRuntime package.

  - script: |
      sh ./build.sh -DONNXRUNTIME_LIB_DIR=onnxruntime-linux-x64-$(ort.version)/lib -DOCOS_ENABLE_CTEST=ON
      CPU_NUMBER=2 sh ./build.sh -DONNXRUNTIME_LIB_DIR=onnxruntime-linux-x64-$(ort.version)/lib -DOCOS_ENABLE_CTEST=ON
    displayName: build the customop library with onnxruntime

  - script: |

@@ -62,7 +62,7 @@ jobs:
    displayName: Install requirements.txt

  - script: |
      python setup.py develop
      CPU_NUMBER=2 python setup.py develop
    displayName: Build the library and tests

  - script: python -m pip install $(torch.version)

@@ -289,7 +289,7 @@ jobs:

  - script: |
      call activate pyenv
      python -m pytest test
      pytest test
    displayName: Run python test

  - task: PublishPipelineArtifact@0

@@ -314,7 +314,11 @@ jobs:
    displayName: Setup emscripten pipeline

  - script: |
      sh ./build.sh -DCMAKE_TOOLCHAIN_FILE=$(Build.BinariesDirectory)/emsdk/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake -DOCOS_ENABLE_SPM_TOKENIZER=ON -DOCOS_ENABLE_PYTHON=OFF
      sh ./build.sh \
        -DCMAKE_TOOLCHAIN_FILE=$(Build.BinariesDirectory)/emsdk/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake \
        -DOCOS_ENABLE_SPM_TOKENIZER=ON \
        -DOCOS_ENABLE_PYTHON=OFF \
        -DOCOS_ENABLE_OPENCV=OFF
    displayName: build the customop library with onnxruntime
    # TODO add unittest for webassembly

@@ -378,10 +382,10 @@ jobs:
    workingDirectory: $(OnnxruntimeDirectory)

  - script: |
      adb push out/Darwin/RelWithDebInfo/libortcustomops.so /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/lib/libortcustomops.so /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/data /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/operators_test /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/ortcustomops_test /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/bin/operators_test /data/local/tmp
      adb push out/Darwin/RelWithDebInfo/bin/ortcustomops_test /data/local/tmp
      adb push $(OnnxruntimeDirectory)/build/Release/libonnxruntime.so /data/local/tmp
      adb shell 'cd /data/local/tmp/ && chmod a+x operators_test'
      adb shell 'cd /data/local/tmp/ && chmod a+x ortcustomops_test'
@@ -1,49 +0,0 @@
jobs:
- job: linux
  pool: {vmImage: 'ubuntu-latest'}
  steps:
  - task: UsePythonVersion@0
  - bash: |
      set -o errexit
      python3 -m pip install --upgrade pip
      pip3 install cibuildwheel==2.1.2
    displayName: Install dependencies
  - bash: cibuildwheel --output-dir wheelhouse .
    displayName: Build wheels
  - task: PublishBuildArtifacts@1
    inputs: {pathtoPublish: 'wheelhouse'}

- job: macos
  pool: {vmImage: 'macOS-latest'}
  variables:
    CIBW_ARCHS_MACOS: "x86_64 universal2 arm64"
    # Skip trying to test arm64 builds on Intel Macs
    # CIBW_TEST_SKIP: "*-macosx_arm64 *-macosx_universal2:arm64"
    # Disable building PyPy wheels
    CIBW_SKIP: pp*

  steps:
  - task: UsePythonVersion@0
  - bash: |
      set -o errexit
      python3 -m pip install --upgrade pip
      python3 -m pip install cibuildwheel==2.1.2
    displayName: Install dependencies
  - bash: cibuildwheel --output-dir wheelhouse .
    displayName: Build wheels
  - task: PublishBuildArtifacts@1
    inputs: {pathtoPublish: wheelhouse}

# - job: windows
#   pool: {vmImage: 'windows-latest'}
#   steps:
#   - task: UsePythonVersion@0
#   - bash: |
#       set -o errexit
#       python -m pip install --upgrade pip
#       pip install cibuildwheel==2.1.2
#     displayName: Install dependencies
#   - bash: cibuildwheel --output-dir wheelhouse .
#     displayName: Build wheels
#   - task: PublishBuildArtifacts@1
#     inputs: {pathtoPublish: 'wheelhouse'}
@@ -0,0 +1,18 @@
jobs:
- job: linux
  timeoutInMinutes: 120
  pool: {vmImage: 'ubuntu-latest', name: 'Linux-CPU'}
  variables:
    CIBW_BUILD: "cp3{7,8,9}-*"

  steps:
  - task: UsePythonVersion@0
  - bash: |
      set -o errexit
      python3 -m pip install --upgrade pip
      pip3 install cibuildwheel
    displayName: Install dependencies
  - bash: cibuildwheel --output-dir wheelhouse .
    displayName: Build wheels
  - task: PublishBuildArtifacts@1
    inputs: {pathtoPublish: 'wheelhouse'}
@@ -0,0 +1,23 @@
jobs:
- job: macos
  timeoutInMinutes: 120
  pool: {vmImage: 'macOS-latest'}
  variables:
    CIBW_BUILD: "cp3{8,9}-*"
    CIBW_ARCHS_MACOS: "x86_64 universal2 arm64"
    # Skip trying to test arm64 builds on Intel Macs
    # CIBW_TEST_SKIP: "*-macosx_arm64 *-macosx_universal2:arm64"
    # Disable building PyPy wheels
    CIBW_SKIP: pp*

  steps:
  - task: UsePythonVersion@0
  - bash: |
      set -o errexit
      python3 -m pip install --upgrade pip
      python3 -m pip install cibuildwheel
    displayName: Install dependencies
  - bash: CPU_NUMBER=2 cibuildwheel --output-dir wheelhouse .
    displayName: Build wheels
  - task: PublishBuildArtifacts@1
    inputs: {pathtoPublish: wheelhouse}
@@ -0,0 +1,17 @@
jobs:
- job: windows
  timeoutInMinutes: 120
  pool: {vmImage: 'windows-latest', name: 'Win-CPU-2021'}
  variables:
    CIBW_BUILD: "cp3{7,8,9}-*"

  steps:
  - task: UsePythonVersion@0
  - script: |
      python -m pip install --upgrade pip
      pip install cibuildwheel
    displayName: Install dependencies
  - bash: cibuildwheel --platform windows --output-dir wheelhouse .
    displayName: Build wheels
  - task: PublishBuildArtifacts@1
    inputs: {pathtoPublish: 'wheelhouse'}
@@ -34,6 +34,7 @@ option(OCOS_ENABLE_BERT_TOKENIZER "Enable the BertTokenizer building" ON)
option(OCOS_ENABLE_BLINGFIRE "Enable the Blingfire building" ON)
option(OCOS_ENABLE_DLIB "Enable the Inverse building" ON)
option(OCOS_ENABLE_MATH "Enable the math tensor operators building" ON)
option(OCOS_ENABLE_OPENCV "Enable building the operators depending on opencv" ON)
option(OCOS_ENABLE_STATIC_LIB "Enable generating static library" OFF)
option(OCOS_ENABLE_SELECTED_OPLIST "Enable including the selected_ops tool file" OFF)

@@ -48,6 +49,7 @@ function(disable_all_operators)
  set(OCOS_ENABLE_BLINGFIRE OFF CACHE INTERNAL "")
  set(OCOS_ENABLE_MATH OFF CACHE INTERNAL "")
  set(OCOS_ENABLE_DLIB OFF CACHE INTERNAL "")
  set(OCOS_ENABLE_OPENCV OFF CACHE INTERNAL "")
endfunction()

if(NOT CC_OPTIMIZE)

@@ -103,7 +105,6 @@ if (OCOS_ENABLE_RE2_REGEX)
  set(RE2_BUILD_TESTING OFF CACHE INTERNAL "")
  message(STATUS "Fetch googlere2")
  include(googlere2)
  FetchContent_GetProperties(googlere2)
endif()

if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten")

@@ -111,6 +112,16 @@ if (OCOS_ENABLE_RE2_REGEX)
  endif()
endif()

macro(standardize_output_folder bin_target)
  set_target_properties(${bin_target}
    PROPERTIES
    RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin"
    LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
    ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib"
    PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin")
endmacro()


#### scan all source files

file(GLOB TARGET_SRC "operators/*.cc" "operators/*.h")

@@ -144,6 +155,11 @@ if (OCOS_ENABLE_MATH)
  list(APPEND TARGET_SRC ${TARGET_SRC_MATH} ${TARGET_SRC_DLIB} ${TARGET_SRC_INVERSE})
endif()

if (OCOS_ENABLE_OPENCV)
  message(STATUS "Fetch opencv")
  include(opencv)
endif()

set(_HAS_TOKENIZER OFF)
if (OCOS_ENABLE_GPT2_TOKENIZER)
  # GPT2

@@ -203,25 +219,30 @@ endif()
add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>")
add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>")
add_library(ocos_operators STATIC ${TARGET_SRC})
target_include_directories(ocos_operators PUBLIC operators/tokenizer)

set(ocos_libraries "")
if (OCOS_ENABLE_TF_STRING)
  list(APPEND ocos_libraries re2)
endif()
standardize_output_folder(ocos_operators)

target_include_directories(ocos_operators PUBLIC
  ${PROJECT_SOURCE_DIR}/includes
  ${PROJECT_SOURCE_DIR}/includes/onnxruntime
  ${PROJECT_SOURCE_DIR}/operators)

  ${PROJECT_SOURCE_DIR}/operators
  ${PROJECT_SOURCE_DIR}/operators/tokenizer)
set(ocos_libraries "")
set(OCOS_COMPILE_DEFINITIONS "")

if (OCOS_ENABLE_DLIB)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_DLIB)
endif()

if (_HAS_TOKENIZER)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_TOKENIZER)
endif()

if (OCOS_ENABLE_TF_STRING)
  target_include_directories(ocos_operators PUBLIC
    ${googlere2_SOURCE_DIR}
    ${farmhash_SOURCE_DIR}/src)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_TF_STRING)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_TF_STRING NOMINMAX FARMHASH_NO_BUILTIN_EXPECT)
  list(APPEND ocos_libraries re2)
endif()

if (OCOS_ENABLE_RE2_REGEX)

@@ -234,12 +255,10 @@ if (OCOS_ENABLE_MATH)
  # The dlib matrix implementation is all in the headers, no library compiling needed.
endif()

if (OCOS_ENABLE_DLIB)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_DLIB)
endif()

if (_HAS_TOKENIZER)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_TOKENIZER)
if (OCOS_ENABLE_OPENCV)
  list(APPEND OCOS_COMPILE_DEFINITIONS ENABLE_OPENCV)
  list(APPEND ocos_libraries ${opencv_LIBS})
  target_include_directories(ocos_operators PRIVATE ${opencv_INCLUDE_DIRS})
endif()

if (OCOS_ENABLE_GPT2_TOKENIZER)

@@ -275,12 +294,6 @@ if (OCOS_ENABLE_GPT2_TOKENIZER OR OCOS_ENABLE_WORDPIECE_TOKENIZER)
  list(APPEND ocos_libraries nlohmann_json::nlohmann_json)
endif()

if (OCOS_ENABLE_TF_STRING)
  target_compile_definitions(ocos_operators PRIVATE
    NOMINMAX
    FARMHASH_NO_BUILTIN_EXPECT)
endif()

list(REMOVE_DUPLICATES OCOS_COMPILE_DEFINITIONS)
target_compile_definitions(ocos_operators PRIVATE ${OCOS_COMPILE_DEFINITIONS})
target_link_libraries(ocos_operators PRIVATE ${ocos_libraries})

@@ -301,6 +314,7 @@ if(OCOS_ENABLE_PYTHON)

  file(GLOB TARGET_SRC_PYOPS "pyop/*.cc" "pyop/*.h")
  add_library(ortcustomops SHARED ${TARGET_SRC_PYOPS} ${shared_TARGET_SRC})
  standardize_output_folder(ortcustomops)
  list(APPEND OCOS_COMPILE_DEFINITIONS PYTHON_OP_SUPPORT)
# building static lib has higher priority
elseif(OCOS_ENABLE_STATIC_LIB)

@@ -332,9 +346,10 @@ else()
      target_link_libraries(ortcustomops PUBLIC log)
    endif()
  endif()
  standardize_output_folder(ortcustomops)
endif()

target_compile_definitions(ortcustomops PRIVATE ${OCOS_COMPILE_DEFINITIONS})
target_compile_definitions(ortcustomops PRIVATE ${OCOS_COMPILE_DEFINITIONS} ${GTEST_CXX_FLAGS})
target_link_libraries(ortcustomops PUBLIC ocos_operators)

if(OCOS_ENABLE_PYTHON)

@@ -380,20 +395,23 @@ if (OCOS_ENABLE_CTEST)
  include(googletest)
  file(GLOB static_TEST_SRC "${TEST_SRC_DIR}/static_test/*.cc")
  add_executable(operators_test ${static_TEST_SRC})
  standardize_output_folder(operators_test)
  target_link_libraries(operators_test PRIVATE gtest_main ocos_operators ${ocos_libraries})
  add_test(NAME operators_test COMMAND $<TARGET_FILE:operators_test>)

  set(LINUX_CC_FLAGS "")
  # needs to link with stdc++fs in Linux
  if(UNIX AND NOT APPLE AND NOT CMAKE_SYSTEM_NAME STREQUAL "Android")
    set(FS_STDLIB stdc++fs)
    list(APPEND LINUX_CC_FLAGS stdc++fs -pthread)
  endif()

  file(GLOB shared_TEST_SRC "${TEST_SRC_DIR}/shared_test/*.cc")
  add_executable(ortcustomops_test ${shared_TEST_SRC})
  standardize_output_folder(ortcustomops_test)
  if (ONNXRUNTIME_LIB_DIR)
    target_link_directories(ortcustomops_test PRIVATE ${ONNXRUNTIME_LIB_DIR})
  endif()
  target_link_libraries(ortcustomops_test PRIVATE ortcustomops onnxruntime gtest_main ${ocos_libraries} ${FS_STDLIB})
  target_link_libraries(ortcustomops_test PRIVATE ortcustomops onnxruntime gtest_main ${ocos_libraries} ${LINUX_CC_FLAGS})
  if (WIN32)
    file(TO_CMAKE_PATH "${ONNXRUNTIME_LIB_DIR}/*" ONNXRUNTIME_LIB_FILEPATTERN)
    file(GLOB ONNXRUNTIME_LIB_FILES CONFIGURE_DEPENDS "${ONNXRUNTIME_LIB_FILEPATTERN}")
build.bat (14 changes)

@@ -1,7 +1,7 @@
@ECHO OFF
ECHO Copy this file to mybuild.bat and make the changes as you needs
ECHO Copy this file to mybuild.bat and make any changes you deem necessary
SETLOCAL ENABLEDELAYEDEXPANSION

IF DEFINED VSINSTALLDIR GOTO :VSDEV_CMD
set VCVARS="NOT/EXISTED"
FOR %%I in (Enterprise Professional Community BuildTools^
) DO IF EXIST "%ProgramFiles(x86)%\Microsoft Visual Studio\2019\%%I\VC\Auxiliary\Build\vcvars64.bat" (

@@ -10,8 +10,15 @@ FOR %%I in (Enterprise Professional Community BuildTools^
IF NOT EXIST %VCVARS% GOTO :NOT_FOUND
ECHO Found %VCVARS%
CALL %VCVARS%

:VSDEV_CMD
set GENERATOR="Visual Studio 16 2019"
IF "%VisualStudioVersion:~0,2%" == "16" GOTO :START_BUILD
set GENERATOR="Visual Studio 17 2022"

:START_BUILD
mkdir .\out\Windows\ 2>NUL
cmake -G "Visual Studio 16 2019" -A x64 %* -B out\Windows -S .
cmake -G %GENERATOR% -A x64 %* -B out\Windows -S .
IF %ERRORLEVEL% NEQ 0 EXIT /B %ERRORLEVEL%
cmake --build out\Windows --config RelWithDebInfo
IF %ERRORLEVEL% NEQ 0 EXIT /B %ERRORLEVEL%

@@ -19,6 +26,7 @@ GOTO :EOF

:NOT_FOUND
ECHO "No Microsoft Visual Studio 2019 installation found!"
ECHO " Or not run from Developer Command Prompt for VS 2022"
EXIT /B 1

ENDLOCAL
build.sh (11 changes)

@@ -4,8 +4,13 @@
set -e -x -u

OSNAME=$(uname -s)
if [[ "$OSNAME" == "Darwin" ]]; then
  alias nproc="sysctl -n hw.logicalcpu"
fi

BUILD_FLAVOR=RelWithDebInfo
target_dir=out/$OSNAME/$BUILD_FLAVOR
mkdir -p $target_dir && cd $target_dir

cmake "$@" ../../.. && cmake --build . --config $BUILD_FLAVOR --parallel
mkdir -p "$target_dir" && cd "$target_dir"
if [ -z ${CPU_NUMBER+x} ]; then CPU_NUMBER=$(nproc); fi
# it looks the parallel build on CI pipeline machine causes crashes.
cmake "$@" ../../.. && cmake --build . --config $BUILD_FLAVOR --parallel "${CPU_NUMBER}"
@@ -8,7 +8,7 @@ FetchContent_GetProperties(googlere2)
string(TOLOWER "googlere2" lcName)
if(NOT ${lcName}_POPULATED)
  FetchContent_Populate(googlere2)
  add_subdirectory(${googlere2_SOURCE_DIR} ${googlere2_BINARY_DIR})
  add_subdirectory(${googlere2_SOURCE_DIR} ${googlere2_BINARY_DIR} EXCLUDE_FROM_ALL)
  set_target_properties(re2
    PROPERTIES
    POSITION_INDEPENDENT_CODE ON)
@@ -0,0 +1,103 @@
set(BUILD_ZLIB OFF CACHE INTERNAL "")
set(BUILD_TIFF OFF CACHE INTERNAL "")
set(BUILD_OPENJPEG OFF CACHE INTERNAL "")
set(BUILD_JASPER OFF CACHE INTERNAL "")
set(BUILD_JPEG OFF CACHE INTERNAL "")
set(BUILD_PNG OFF CACHE INTERNAL "")
set(BUILD_OPENEXR OFF CACHE INTERNAL "")
set(BUILD_WEBP OFF CACHE INTERNAL "")
set(BUILD_TBB OFF CACHE INTERNAL "")
set(BUILD_IPP_IW OFF CACHE INTERNAL "")
set(BUILD_ITT OFF CACHE INTERNAL "")
set(WITH_AVFOUNDATION OFF CACHE INTERNAL "")
set(WITH_CAP_IOS OFF CACHE INTERNAL "")
set(WITH_CAROTENE OFF CACHE INTERNAL "")
set(WITH_CPUFEATURES OFF CACHE INTERNAL "")
set(WITH_EIGEN OFF CACHE INTERNAL "")
set(WITH_FFMPEG OFF CACHE INTERNAL "")
set(WITH_GSTREAMER OFF CACHE INTERNAL "")
set(WITH_GTK OFF CACHE INTERNAL "")
set(WITH_IPP OFF CACHE INTERNAL "")
set(WITH_HALIDE OFF CACHE INTERNAL "")
set(WITH_VULKAN OFF CACHE INTERNAL "")
set(WITH_INF_ENGINE OFF CACHE INTERNAL "")
set(WITH_NGRAPH OFF CACHE INTERNAL "")
set(WITH_JASPER OFF CACHE INTERNAL "")
set(WITH_OPENJPEG OFF CACHE INTERNAL "")
set(WITH_JPEG OFF CACHE INTERNAL "")
set(WITH_WEBP OFF CACHE INTERNAL "")
set(WITH_OPENEXR OFF CACHE INTERNAL "")
set(WITH_PNG OFF CACHE INTERNAL "")
set(WITH_TIFF OFF CACHE INTERNAL "")
set(WITH_OPENVX OFF CACHE INTERNAL "")
set(WITH_GDCM OFF CACHE INTERNAL "")
set(WITH_TBB OFF CACHE INTERNAL "")
set(WITH_HPX OFF CACHE INTERNAL "")
set(WITH_OPENMP OFF CACHE INTERNAL "")
set(WITH_PTHREADS_PF OFF CACHE INTERNAL "")
set(WITH_V4L OFF CACHE INTERNAL "")
set(WITH_CLP OFF CACHE INTERNAL "")
set(WITH_OPENCL OFF CACHE INTERNAL "")
set(WITH_OPENCL_SVM OFF CACHE INTERNAL "")
set(WITH_ITT OFF CACHE INTERNAL "")
set(WITH_PROTOBUF OFF CACHE INTERNAL "")
set(WITH_IMGCODEC_HDR OFF CACHE INTERNAL "")
set(WITH_IMGCODEC_SUNRASTER OFF CACHE INTERNAL "")
set(WITH_IMGCODEC_PXM OFF CACHE INTERNAL "")
set(WITH_IMGCODEC_PFM OFF CACHE INTERNAL "")
set(WITH_QUIRC OFF CACHE INTERNAL "")
set(WITH_ANDROID_MEDIANDK OFF CACHE INTERNAL "")
set(WITH_TENGINE OFF CACHE INTERNAL "")
set(WITH_ONNX OFF CACHE INTERNAL "")
set(BUILD_opencv_apps OFF CACHE INTERNAL "")
set(BUILD_ANDROID_PROJECTS OFF CACHE INTERNAL "")
set(BUILD_ANDROID_EXAMPLES OFF CACHE INTERNAL "")
set(BUILD_DOCS OFF CACHE INTERNAL "")
set(BUILD_WITH_STATIC_CRT OFF CACHE INTERNAL "")
set(BUILD_FAT_JAVA_LIB OFF CACHE INTERNAL "")
set(BUILD_ANDROID_SERVICE OFF CACHE INTERNAL "")
set(BUILD_JAVA OFF CACHE INTERNAL "")
set(BUILD_OBJC OFF CACHE INTERNAL "")
set(ENABLE_PRECOMPILED_HEADERS OFF CACHE INTERNAL "")
set(ENABLE_FAST_MATH OFF CACHE INTERNAL "")
set(BUILD_opencv_java OFF CACHE INTERNAL "")
set(BUILD_opencv_gapi OFF CACHE INTERNAL "")
set(BUILD_opencv_objc OFF CACHE INTERNAL "")
set(BUILD_opencv_js OFF CACHE INTERNAL "")
set(BUILD_opencv_ts OFF CACHE INTERNAL "")
set(BUILD_opencv_features2d OFF CACHE INTERNAL "")
set(BUILD_opencv_photo OFF CACHE INTERNAL "")
set(BUILD_opencv_video OFF CACHE INTERNAL "")
set(BUILD_opencv_python2 OFF CACHE INTERNAL "")
set(BUILD_opencv_python3 OFF CACHE INTERNAL "")
set(BUILD_opencv_dnn OFF CACHE INTERNAL "")
set(BUILD_opencv_imgcodecs OFF CACHE INTERNAL "")
set(BUILD_opencv_videoio OFF CACHE INTERNAL "")
set(BUILD_opencv_calib3d OFF CACHE INTERNAL "")
set(BUILD_opencv_highgui OFF CACHE INTERNAL "")
set(BUILD_opencv_flann OFF CACHE INTERNAL "")
set(BUILD_opencv_objdetect OFF CACHE INTERNAL "")
set(BUILD_opencv_stitching OFF CACHE INTERNAL "")
set(BUILD_opencv_ml OFF CACHE INTERNAL "")
set(BUILD_opencv_world OFF CACHE INTERNAL "")


FetchContent_Declare(
  opencv
  GIT_REPOSITORY https://github.com/opencv/opencv.git
  GIT_TAG        4.5.4
  GIT_SHALLOW    TRUE
  -DBUILD_DOCS:BOOL=FALSE
  -DBUILD_EXAMPLES:BOOL=FALSE
  -DBUILD_TESTS:BOOL=FALSE
  -DBUILD_SHARED_LIBS:BOOL=FALSE
  -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_CURRENT_BINARY_DIR}/opencv
  -DCV_TRACE:BOOL=FALSE
)

FetchContent_MakeAvailable(opencv)
set(opencv_INCLUDE_DIRS "")
list(APPEND opencv_INCLUDE_DIRS ${OPENCV_CONFIG_FILE_INCLUDE_DIR})
list(APPEND opencv_INCLUDE_DIRS ${OPENCV_MODULE_opencv_core_LOCATION}/include ${OPENCV_MODULE_opencv_imgproc_LOCATION}/include)
set(opencv_LIBS "")
list(APPEND opencv_LIBS opencv_core opencv_imgproc)
@@ -1,7 +0,0 @@
# Dockerfile

This folder contains the docker file to help the user to build the packages from the source.

## onnx_ort_ext
Build ONNX, ONNXRuntime and ONNXRuntime-Extensions packages from the onnxruntime current master branch,
or from the source folder onnx_ort_ext/onnxruntime if it exists.
@@ -1,71 +0,0 @@
# This Dockerfile will build Docker Image with
# ONNX (from ONNXRuntime submodule) + ONNXRuntime + ONNXRuntime-Extensions
# installed for CPU only
#
# Example commandline to build this full package set.
#   docker build . -t docker-image-repo-name
# Example commandline to run the built docker container:
#   sudo docker run --name container-name -it docker-image-repo-name

ARG PYTHON_VERSION=3.8

FROM ubuntu:18.04

RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-utils \
    build-essential \
    ca-certificates \
    ccache \
    cmake \
    curl \
    git \
    wget \
    gcc-8 \
    g++-8 \
    libprotobuf-dev \
    protobuf-compiler && \
    rm -rf /var/lib/apt/lists/*
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 100 && \
    update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-8 100
RUN /usr/sbin/update-ccache-symlinks
RUN mkdir /opt/ccache && ccache --set-config=cache_dir=/opt/ccache

ENV PATH /opt/conda/bin:$PATH

RUN wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
    chmod +x ~/miniconda.sh && \
    ~/miniconda.sh -b -p /opt/conda && \
    rm ~/miniconda.sh && \
    /opt/conda/bin/conda install -y python=${PYTHON_VERSION} conda-build && \
    /opt/conda/bin/conda install -y numpy ninja setuptools cmake protobuf typing-extensions && \
    /opt/conda/bin/conda clean -ya

ADD . /source
WORKDIR /root
RUN if [ -d /source/onnnxruntime ] ; then ln -s /source/onnxruntime /root/onnxruntime ; fi
RUN if [ ! -L /root/onnxruntime ] ; then git clone https://github.com/microsoft/onnxruntime.git && \
    cd onnxruntime && git submodule update --init --recursive ; fi

RUN cd /root/onnxruntime/cmake/external/onnx && \
    CMAKE_ARGS="-DONNX_USE_PROTOBUF_SHARED_LIBS=ON" python3 setup.py bdist_wheel && \
    python3 -m pip install dist/*.whl

RUN cd /root/onnxruntime && \
    /bin/bash ./build.sh \
    --config Release \
    --build_wheel \
    --update \
    --build \
    --parallel \
    --skip_submodule_sync \
    --cmake_extra_defines \
    ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) && \
    python3 -m pip install /root/onnxruntime/build/Linux/Release/dist/*.whl

RUN cd /root/onnxruntime/cmake/external/onnxruntime-extensions && \
    python3 setup.py bdist_wheel && \
    python3 -m pip install dist/*.whl

RUN if [ -L /root/onnxruntime ]; then unlink /root/onnxruntime ; fi && \
    if [ -d /root/onnxruntime ]; then rm -rf /root/onnxruntime ; fi && \
    rm -rf /opt/ccache
@@ -37,6 +37,9 @@ extern "C" {
#define _Outptr_result_buffer_maybenull_(X)
#define ORT_ALL_ARGS_NONNULL __attribute__((nonnull))
#else
#if defined(__MINGW32__)
#define _Frees_ptr_opt_
#endif
#include <specstrings.h>
#define ORT_ALL_ARGS_NONNULL
#endif

@@ -49,7 +52,11 @@ extern "C" {
#else
#define ORT_EXPORT
#endif
#if defined(__MINGW32__)
#define ORT_API_CALL __stdcall
#else
#define ORT_API_CALL _stdcall
#endif
#define ORT_MUST_USE_RESULT
#define ORTCHAR_T wchar_t
#else
@@ -191,13 +191,21 @@ def get_id_models():
    return _OnnxModelFunction.id_object_map


class OnnxTracedModelFunction:
    def __init__(self, onnx_model):
        self.func_id = create_model_function(onnx_model)

    def __call__(self, *args, **kwargs):
        return _OnnxTracedFunction.apply(torch.tensor(self.func_id), *args, **kwargs)


class _OnnxModelModule(torch.nn.Module):
    def __init__(self, mdl):
        super(_OnnxModelModule, self).__init__()
        self.model_function_id = torch.tensor(create_model_function(mdl))
        self.function = OnnxTracedModelFunction(mdl)

    def forward(self, *args):
        return _OnnxTracedFunction.apply(self.model_function_id, *args)
        return self.function(*args)


def _symbolic_pythonop(g: torch._C.Graph, n: torch._C.Node, *args, **kwargs):
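Note: the OnnxTracedModelFunction introduced in this hunk is a callable wrapper around a registered ONNX model, and _OnnxModelModule now delegates to it in forward(). A rough usage sketch, based only on the lines shown above (onnx_model is a placeholder for whatever model object the caller already has; this is illustrative, not the project's documented API):

    import torch

    # Register the model once; every call dispatches through the traced autograd function.
    func = OnnxTracedModelFunction(onnx_model)   # onnx_model: placeholder
    output = func(torch.rand(1, 3, 224, 224))

    # The nn.Module wrapper now routes through the same callable.
    module = _OnnxModelModule(onnx_model)
    output = module(torch.rand(1, 3, 224, 224))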
@@ -1,8 +1,3 @@
[project]
name = "onnxruntime_extensions"
# since onnxruntime havn't supported Python 3.10 yet
requires-python = "<3.10"

[build-system]
# Minimum requirements for the build system to execute.
requires = ["setuptools", "wheel", "numpy>=1.18.5"]  # PEP 508 specifications.
setup.py (49 changes)

@@ -5,10 +5,7 @@
###########################################################################

from setuptools import setup, find_packages
from setuptools.dist import Distribution
from setuptools.command.build_ext import build_ext as _build_ext
from setuptools.command.develop import develop as _develop
from setuptools.command.build_py import build_py as _build_py

import os
import sys

@@ -37,7 +34,7 @@ def load_msvcvar():
    if shutil.which('cmake') is None:
        raise SystemExit(
            "Cannot find cmake in the executable path, " +
            "please install one or specify the environement variable VCVARS to the path of VS vcvars64.bat.")
            "please install one or specify the environment variable VCVARS to the path of VS vcvars64.bat.")


def read_git_refs():

@@ -55,7 +52,7 @@ def read_git_refs():
        _ln = dedent(_ln).strip('\n\r')
        if _ln.startswith(HEAD):
            _, _2 = _ln.split(' ')
            if (_2.startswith('refs/remotes/origin/rel-')):
            if _2.startswith('refs/remotes/origin/rel-'):
                release_branch = True
    return release_branch, HEAD

@@ -81,7 +78,7 @@ class BuildCMakeExt(_build_ext):
            '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + str(ext_fullpath.parent.absolute()),
            '-DOCOS_ENABLE_PYTHON=ON',
            '-DOCOS_ENABLE_CTEST=OFF',
            '-DOCOS_EXTENTION_NAME=' + pathlib.Path(self.get_ext_filename(extension.name)).name,
            '-DOCOS_EXTENTION_NAME=' + ext_fullpath.name,
            '-DCMAKE_BUILD_TYPE=' + config
        ]
        # overwrite the Python module info if the auto-detection doesn't work.
@@ -94,9 +91,11 @@ class BuildCMakeExt(_build_ext):
        if self.debug:
            cmake_args += ['-DCC_OPTIMIZE=OFF']

        # the parallel build has to be limited on some Linux VM machine.
        cpu_number = os.environ.get('CPU_NUMBER')
        build_args = [
            '--config', config,
            '--parallel'
            '--parallel' + ('' if cpu_number is None else ' ' + cpu_number)
        ]

        self.spawn(['cmake', '-S', str(project_dir), '-B', str(build_temp)] + cmake_args)
|
|||
config_dir = '.'
|
||||
if not (build_temp / 'build.ninja').exists():
|
||||
config_dir = config
|
||||
self.copy_file(build_temp / config_dir / 'ortcustomops.dll',
|
||||
self.get_ext_filename(extension.name))
|
||||
|
||||
|
||||
class BuildPy(_build_py):
|
||||
def run(self):
|
||||
self.run_command("build_ext")
|
||||
return super().run()
|
||||
|
||||
|
||||
class BuildDevelop(_develop):
|
||||
def run(self):
|
||||
self.run_command("build_ext")
|
||||
return super().run()
|
||||
|
||||
|
||||
class BinaryDistribution(Distribution):
|
||||
def has_ext_modules(self):
|
||||
return True
|
||||
self.copy_file(build_temp / 'bin' / config_dir / 'ortcustomops.dll', ext_fullpath)
|
||||
else:
|
||||
self.copy_file(build_temp / 'lib' / ext_fullpath.name, ext_fullpath)
|
||||
|
||||
|
||||
def read_requirements():
|
||||
|
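The copy step above relies on the newly standardized bin/ and lib/ output folders from CMakeLists.txt. A rough standalone sketch of the same idea follows; the surrounding platform check is an assumption (it is not visible in this hunk), and copy_built_extension is an invented name:

    import pathlib
    import shutil
    import sys

    def copy_built_extension(build_temp, ext_fullpath, config="RelWithDebInfo"):
        # Assumed platform split: the hunk only shows the two copy branches.
        build_temp = pathlib.Path(build_temp)
        ext_fullpath = pathlib.Path(ext_fullpath)
        if sys.platform == "win32":
            # Multi-config generators place binaries under bin/<config>/.
            config_dir = "." if (build_temp / "build.ninja").exists() else config
            shutil.copyfile(build_temp / "bin" / config_dir / "ortcustomops.dll", ext_fullpath)
        else:
            # Single-config builds place the shared library under lib/.
            shutil.copyfile(build_temp / "lib" / ext_fullpath.name, ext_fullpath)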
@@ -166,8 +149,8 @@ package_data = {
}

long_description = ''
with open(os.path.join(TOP_DIR, "README.md"), 'r') as f:
    long_description = f.read()
with open(os.path.join(TOP_DIR, "README.md"), 'r') as _f:
    long_description += _f.read()
start_pos = long_description.find('# Introduction')
start_pos = 0 if start_pos < 0 else start_pos
end_pos = long_description.find('# Contributing')
@@ -187,13 +170,8 @@ setup(
    author_email='onnx@microsoft.com',
    url='https://github.com/microsoft/onnxruntime-extensions',
    ext_modules=ext_modules,
    cmdclass=dict(
        build_ext=BuildCMakeExt,
        build_py=BuildPy,
        develop=BuildDevelop
    ),
    cmdclass=dict(build_ext=BuildCMakeExt),
    include_package_data=True,
    distclass=BinaryDistribution,
    install_requires=read_requirements(),
    classifiers=[
        'Development Status :: 4 - Beta',
@@ -204,11 +182,10 @@ setup(
        'Operating System :: POSIX :: Linux',
        "Programming Language :: C++",
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        "Programming Language :: Python :: Implementation :: CPython",
        'License :: OSI Approved :: MIT License'
    ],
    ]
)
@@ -16,8 +16,10 @@ const char* GetLibraryPath() {
  return "ortcustomops.dll";
#elif defined(__APPLE__)
  return "libortcustomops.dylib";
#elif defined(ANDROID) || defined(__ANDROID__)
  return "libortcustomops.so";
#else
  return "./libortcustomops.so";
  return "lib/libortcustomops.so";
#endif
}

@@ -303,11 +305,20 @@ TEST(ustring, tensor_operator) {

  const auto* api_base = OrtGetApiBase();
  const auto* api = api_base->GetApi(ORT_API_VERSION);
  api->GetAllocatorWithDefaultOptions(&allocator);
  auto status = api->GetAllocatorWithDefaultOptions(&allocator);
  ASSERT_TRUE(status == nullptr || api->GetErrorCode(status) == ORT_OK);
  if (status != nullptr) {
    api->ReleaseStatus(status);
  }

  Ort::CustomOpApi custom_api(*api);

  std::vector<int64_t> dim{2, 2};
  api->CreateTensorAsOrtValue(allocator, dim.data(), dim.size(), ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING, &tensor);
  status = api->CreateTensorAsOrtValue(allocator, dim.data(), dim.size(), ONNX_TENSOR_ELEMENT_DATA_TYPE_STRING, &tensor);
  ASSERT_TRUE(status == nullptr || api->GetErrorCode(status) == ORT_OK);
  if (status != nullptr) {
    api->ReleaseStatus(status);
  }

  std::vector<ustring> input_value{ustring("test"), ustring("测试"), ustring("Test de"), ustring("🧐")};
  FillTensorDataString(*api, custom_api, nullptr, input_value, tensor);