From 48be0efea2535fe46dca16025bbb970a26bce9df Mon Sep 17 00:00:00 2001
From: Wenbing Li <10278425+wenbingl@users.noreply.github.com>
Date: Tue, 1 Jun 2021 14:02:55 -0700
Subject: [PATCH] more refinement on the release doc and script (#99)

* more refinement on the release doc and script
* fix the build script.
---
 .github/workflows/scripts/wheelbuilder.sh |  2 +-
 README.md                                 | 21 +++++++--------------
 setup.py                                  | 21 ++++++++++++++++-----
 3 files changed, 24 insertions(+), 20 deletions(-)

diff --git a/.github/workflows/scripts/wheelbuilder.sh b/.github/workflows/scripts/wheelbuilder.sh
index bb995d0d..b11c071d 100755
--- a/.github/workflows/scripts/wheelbuilder.sh
+++ b/.github/workflows/scripts/wheelbuilder.sh
@@ -6,7 +6,7 @@ set -e -x -u
 PY_VERSION=$1
 PLAT=$2
 GITHUB_EVENT_NAME=$3
-BUILD_REQUIREMENTS='numpy==1.18.5'
+BUILD_REQUIREMENTS='numpy>=1.18.5 wheel'
 
 PY_VER="cp${PY_VERSION//./}-cp${PY_VERSION//./}"
 if [ ! -d "/opt/python/${PY_VER}" ]
diff --git a/README.md b/README.md
index f7dc1452..dc7beaef 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,11 @@
 # ONNXRuntime Extensions
 [![Build Status](https://dev.azure.com/aiinfra/ONNX%20Converters/_apis/build/status/microsoft.ort-customops?repoName=microsoft%2Fonnxruntime-extensions&branchName=main)](https://dev.azure.com/aiinfra/ONNX%20Converters/_build/latest?definitionId=907&repoName=microsoft%2Fonnxruntime-extensions&branchName=main)
 
+# Introduction
 ONNXRuntime Extensions is a comprehensive package to extend the capability of the ONNX conversion and inference.
 1. The CustomOp C++ library for [ONNX Runtime](http://onnxruntime.ai) on ONNXRuntime CustomOp API.
 2. Support PyOp feature to implement the custom op with a Python function.
-3. Build all-in-one ONNX model from the pre/post processing code, go to [docs/pre_post_processing.md](docs/pre_post_processing.md) for details.
+3. Build an all-in-one ONNX model from the pre/post-processing code; see [docs/pre_post_processing.md](https://github.com/microsoft/onnxruntime-extensions/blob/main/docs/pre_post_processing.md) for details.
 4. Support Python per operator debugging, checking ```hook_model_op``` in onnxruntime_extensions Python package.
 
 # Quick Start
@@ -23,15 +24,15 @@ output, *_ = gpt2_core(input_ids)
 next_id = numpy.argmax(output[:, :, -1, :], axis=-1)
 print(input_text[0] + decode(next_id).item())
 ```
-This is a simplified version of GPT-2 inference for the demonstration only, The comprehensive solution on the GPT-2 model and its deviants are under development, and here is the [link](tutorials/gpt2bs.py) to the experimental.
+This is a simplified version of GPT-2 inference, for demonstration only. A comprehensive solution for the GPT-2 model and its variants is under development; here is the [link](https://github.com/microsoft/onnxruntime-extensions/blob/main/tutorials/gpt2bs.py) to the experimental script.
 
 ## Android/iOS
-The previous processing python code can be translated into all-in-one model to be run in Android/iOS mobile platform, without any Python runtime and the 3rd-party dependencies requirement. Here is the [tutorial](tutorials/gpt2bs.py)
+The previous processing Python code can be translated into an all-in-one model that runs on the Android/iOS mobile platforms, without any Python runtime or third-party dependencies.
+Here is the [tutorial](https://github.com/microsoft/onnxruntime-extensions/blob/main/tutorials/gpt2bs.py).
 
 ## CustomOp Conversion
 The mainstream ONNX converters support the custom op generation if there is the operation from the original framework cannot be interpreted as ONNX standard operators. Check the following two examples on how to do this.
-1. [CustomOp conversion by pytorch.onnx.exporter](tutorials/pytorch_custom_ops_tutorial.ipynb)
-2. [CustomOp conversion by tf2onnx](tutorials/tf2onnx_custom_ops_tutorial.ipynb)
+1. [CustomOp conversion by pytorch.onnx.exporter](https://github.com/microsoft/onnxruntime-extensions/blob/main/tutorials/pytorch_custom_ops_tutorial.ipynb)
+2. [CustomOp conversion by tf2onnx](https://github.com/microsoft/onnxruntime-extensions/blob/main/tutorials/tf2onnx_custom_ops_tutorial.ipynb)
 
 ## Inference with CustomOp library
 The CustomOp library was written with C++, so that it supports run the model in the native binaries. The following is the example of C++ version.
@@ -90,7 +91,7 @@ If only DLL/shared library is needed without any Python dependencies, please run
 By default the DLL or the library will be generated in the directory `out//`. There is a unit test to help verify the build.
 
 ## The static library and link with ONNXRuntime
-For sake of the binary size, the project can be built as a static library and link into ONNXRuntime. Here is [the script](ci_build/onnxruntime_integration/build_with_onnxruntime.sh) to this, which is especially usefully on building the mobile release.
+For the sake of binary size, the project can be built as a static library and linked into ONNXRuntime. Here is [the script](https://github.com/microsoft/onnxruntime-extensions/blob/main/ci_build/onnxruntime_integration/build_with_onnxruntime.sh) to do this, which is especially useful for building the mobile release.
 
 # Contributing
 This project welcomes contributions and suggestions. Most contributions require you to agree to a
@@ -105,13 +106,5 @@ This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
 For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
 contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
 
-# Release
-The package is currently release on test pypi
-[onnxruntime-customops](https://test.pypi.org/project/onnxruntime-customops/).
-
-# Changes
-
-**0.0.2**:
-
 # License
 [MIT License](LICENSE)
diff --git a/setup.py b/setup.py
index 870114c8..85e16194 100644
--- a/setup.py
+++ b/setup.py
@@ -100,12 +100,14 @@ class BuildCMakeExt(_build_ext):
 
 class BuildPy(_build_py):
     def run(self):
-        super().run()
+        self.run_command("build_ext")
+        return super().run()
 
 
 class BuildDevelop(_develop):
     def run(self):
-        super().run()
+        self.run_command("build_ext")
+        return super().run()
 
 
 def read_requirements():
@@ -142,19 +144,27 @@ package_data = {
     "onnxruntime_extensions": ["*.dll", "*.so", "*.pyd"],
 }
 
+long_description = ''
+with open(os.path.join(TOP_DIR, "README.md"), 'r') as f:
+    long_description = f.read()
+    start_pos = long_description.find('# Introduction')
+    start_pos = 0 if start_pos < 0 else start_pos
+    end_pos = long_description.find('# Contributing')
+    long_description = long_description[start_pos:end_pos]
+
 setup(
     name='onnxruntime_extensions',
     version=read_version(),
     packages=packages,
     package_dir=package_dir,
     package_data=package_data,
-    description="ONNXRuntime Custom Operator Library",
-    long_description=open(os.path.join(os.getcwd(), "README.md"), 'r').read(),
+    description="ONNXRuntime Extensions",
+    long_description=long_description,
     long_description_content_type='text/markdown',
     license='MIT License',
     author='Microsoft Corporation',
     author_email='onnx@microsoft.com',
-    url='https://github.com/microsoft/ortcustomops',
+    url='https://github.com/microsoft/onnxruntime-extensions',
     ext_modules=ext_modules,
     cmdclass=dict(
         build_ext=BuildCMakeExt,
@@ -172,6 +182,7 @@ setup(
         'Operating System :: POSIX :: Linux',
         "Programming Language :: C++",
         'Programming Language :: Python',
+        'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
         'Programming Language :: Python :: 3.8',
         'Programming Language :: Python :: 3.9',
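
On the wheelbuilder.sh change: relaxing the exact pin `numpy==1.18.5` to the floor `numpy>=1.18.5` lets the manylinux build container install any compatible newer NumPy, and adding `wheel` makes the `bdist_wheel` command available. Below is a small standalone sketch of what the relaxed specifier accepts, using the `packaging` library; the version numbers are arbitrary examples, not versions the build actually tests.

```python
# Compare the old exact pin with the relaxed floor pin from BUILD_REQUIREMENTS.
# Requires the "packaging" library (pip install packaging).
from packaging.specifiers import SpecifierSet

old_pin = SpecifierSet("==1.18.5")  # previous BUILD_REQUIREMENTS pin
new_pin = SpecifierSet(">=1.18.5")  # relaxed pin from this patch

for version in ("1.18.4", "1.18.5", "1.19.5", "1.21.0"):
    print(f"{version}: old={version in old_pin}, new={version in new_pin}")
# 1.18.4: old=False, new=False
# 1.18.5: old=True, new=True
# 1.19.5: old=False, new=True
# 1.21.0: old=False, new=True
```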
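
On the setup.py `cmdclass` change: running `build_ext` ahead of `build_py` and `develop` is a standard setuptools chaining pattern; it guarantees the native extension is compiled before the packaging step collects files, which matters here because `package_data` globs `*.dll`, `*.so`, and `*.pyd`. A minimal self-contained sketch of the pattern follows; the package name is illustrative, and the project's real extension-building logic lives in its `BuildCMakeExt` class, which is not reproduced here.

```python
# Minimal sketch of the setuptools command-chaining pattern used in the patch.
from setuptools import setup
from setuptools.command.build_py import build_py as _build_py
from setuptools.command.develop import develop as _develop


class BuildPy(_build_py):
    def run(self):
        # Compile the native extension first so its binaries exist when
        # build_py gathers package files (including package_data globs).
        self.run_command("build_ext")
        return super().run()


class BuildDevelop(_develop):
    def run(self):
        # Apply the same ordering to editable installs (pip install -e .).
        self.run_command("build_ext")
        return super().run()


setup(
    name="example_native_pkg",  # illustrative name only
    cmdclass={"build_py": BuildPy, "develop": BuildDevelop},
)
```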
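
On the `long_description` change: instead of uploading the entire README to PyPI, setup.py now slices out the section between the newly added `# Introduction` heading and `# Contributing`. Here is a standalone sketch of that slicing over a made-up README string; note that `str.find` returns -1 for a missing marker, which the code maps back to 0 for the start marker, while a missing end marker would slice off only the final character rather than keep the full text.

```python
# Standalone illustration of the README slicing added to setup.py.
# The README content below is an invented stand-in for the real README.md.
readme = """# ONNXRuntime Extensions
[![Build Status](...)](...)
# Introduction
ONNXRuntime Extensions is a comprehensive package ...
# Quick Start
...
# Contributing
This project welcomes contributions and suggestions.
"""

start_pos = readme.find('# Introduction')
start_pos = 0 if start_pos < 0 else start_pos  # fall back to the whole file
end_pos = readme.find('# Contributing')        # -1 if the marker were absent
long_description = readme[start_pos:end_pos]

print(long_description)
# Prints everything from "# Introduction" through the "..." line:
# the badge line above it and the Contributing section are dropped.
```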