"""
Simple checklist from the AllenNLP repo: https://github.com/allenai/allennlp/blob/master/setup.py

To create the package for pypi.

1. Change the version in __init__.py, setup.py as well as docs/source/conf.py
   (see the illustrative lines at the end of this list).

2. Unpin specific versions from setup.py (like isort).

3. Commit these changes with the message: "Release: VERSION"

4. Add a tag in git to mark the release: "git tag VERSION -m 'Adds tag VERSION for pypi'"
   Push the tag to git: git push --tags origin master

5. Build both the sources and the wheel. Do not change anything in setup.py between
   creating the wheel and the source distribution (obviously).

   For the wheel, run: "python setup.py bdist_wheel" in the top level directory.
   (This will build a wheel for the python version you use to build it.)

   For the sources, run: "python setup.py sdist"
   You should now have a /dist directory with both .whl and .tar.gz source versions.

6. Check that everything looks correct by uploading the package to the pypi test server:

   twine upload dist/* -r pypitest
   (pypi suggests using twine, as other methods upload files via plaintext.)
   You may have to specify the repository url; in that case use the following command:
   twine upload dist/* -r pypitest --repository-url=https://test.pypi.org/legacy/

   Check that you can install it in a virtualenv by running:
   pip install -i https://test.pypi.org/simple/ transformers

7. Upload the final version to the actual pypi:
   twine upload dist/* -r pypi

8. Copy the release notes from RELEASE.md to the tag in github once everything is looking hunky-dory.

9. Update the documentation commit in .circleci/deploy.sh so that the correct documentation is displayed.

10. Update README.md to redirect to the correct documentation.
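
For step 1, the version bump is a plain string edit in each of the three files. The lines
below are illustrative only (the exact variable names and file layout may differ, check the
files themselves):

   __version__ = "VERSION"   # src/transformers/__init__.py
   version="VERSION",        # setup.py (below)
   release = "VERSION"       # docs/source/conf.py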
"""
import shutil
from pathlib import Path

from setuptools import find_packages, setup

# Remove stale transformers.egg-info directory to avoid https://github.com/pypa/pip/issues/5466
stale_egg_info = Path(__file__).parent / "transformers.egg-info"
if stale_egg_info.exists():
    print(
        (
            "Warning: {} exists.\n\n"
            "If you recently updated transformers to 3.0 or later, this is expected,\n"
            "but it may prevent transformers from installing in editable mode.\n\n"
            "This directory is automatically generated by Python's packaging tools.\n"
            "I will remove it now.\n\n"
            "See https://github.com/pypa/pip/issues/5466 for details.\n"
        ).format(stale_egg_info)
    )
    shutil.rmtree(stale_egg_info)

extras = {}
extras["mecab"] = ["mecab-python3"]
extras["sklearn"] = ["scikit-learn"]
# keras2onnx and onnxconverter-common are pinned to a specific commit until 1.7.0 lands on pypi
extras["tf"] = [
    "tensorflow",
    "onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
    "keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx",
]
extras["tf-cpu"] = [
    "tensorflow-cpu",
    "onnxconverter-common @ git+git://github.com/microsoft/onnxconverter-common.git@f64ca15989b6dc95a1f3507ff6e4c395ba12dff5#egg=onnxconverter-common",
    "keras2onnx @ git+git://github.com/onnx/keras-onnx.git@cbdc75cb950b16db7f0a67be96a278f8d2953b48#egg=keras2onnx",
]
extras["torch"] = ["torch"]

extras["serving"] = ["pydantic", "uvicorn", "fastapi", "starlette"]
extras["all"] = extras["serving"] + ["tensorflow", "torch"]

extras["testing"] = ["pytest", "pytest-xdist", "timeout-decorator"]
extras["docs"] = ["recommonmark", "sphinx", "sphinx-markdown-tables", "sphinx-rtd-theme"]
extras["quality"] = [
    "black",
    "isort @ git+git://github.com/timothycrosley/isort.git@e63ae06ec7d70b06df9e528357650281a3d3ec22#egg=isort",
    "flake8",
]

extras["dev"] = extras["testing"] + extras["quality"] + ["mecab-python3", "scikit-learn", "tensorflow", "torch"]
setup(
    name="transformers",
    version="2.10.0",
    author="Thomas Wolf, Lysandre Debut, Victor Sanh, Julien Chaumond, Sam Shleifer, Patrick von Platen, Google AI Language Team Authors, Open AI team Authors, Facebook AI Authors, Carnegie Mellon University Authors",
    author_email="thomas@huggingface.co",
    description="State-of-the-art Natural Language Processing for TensorFlow 2.0 and PyTorch",
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="NLP deep learning transformer pytorch tensorflow BERT GPT GPT-2 google openai CMU",
    license="Apache",
    url="https://github.com/huggingface/transformers",
    package_dir={"": "src"},
    packages=find_packages("src"),
    install_requires=[
        "numpy",
        "tokenizers == 0.7.0",
        # dataclasses for Python versions that don't have it
        "dataclasses;python_version<'3.7'",
        # filesystem locks e.g. to prevent parallel downloads
        "filelock",
        # for downloading models over HTTPS
        "requests",
        # progress bars in model download and training scripts
        "tqdm >= 4.27",
        # for OpenAI GPT
        "regex != 2019.12.17",
        # for XLNet
        "sentencepiece",
        # for XLM
        "sacremoses",
    ],
    extras_require=extras,
    scripts=["transformers-cli"],
    python_requires=">=3.6.0",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: Education",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)