2018-12-11 14:20:22 +03:00
"""
Simple check list from AllenNLP repo : https : / / github . com / allenai / allennlp / blob / master / setup . py
To create the package for pypi .
2019-10-09 19:14:03 +03:00
1. Change the version in __init__ . py , setup . py as well as docs / source / conf . py .
2018-12-11 14:20:22 +03:00
2. Commit these changes with the message : " Release: VERSION "
3. Add a tag in git to mark the release : " git tag VERSION -m ' Adds tag VERSION for pypi ' "
Push the tag to git : git push - - tags origin master
4. Build both the sources and the wheel . Do not change anything in setup . py between
creating the wheel and the source distribution ( obviously ) .
2019-09-26 14:47:58 +03:00
For the wheel , run : " python setup.py bdist_wheel " in the top level directory .
2018-12-11 14:20:22 +03:00
( this will build a wheel for the python version you use to build it - make sure you use python 3. x ) .
For the sources , run : " python setup.py sdist "
2019-09-26 14:47:58 +03:00
You should now have a / dist directory with both . whl and . tar . gz source versions .
2018-12-11 14:20:22 +03:00
5. Check that everything looks correct by uploading the package to the pypi test server :
twine upload dist / * - r pypitest
( pypi suggest using twine as other methods upload files via plaintext . )
Check that you can install it in a virtualenv by running :
2019-09-26 11:15:53 +03:00
pip install - i https : / / testpypi . python . org / pypi transformers
2018-12-11 14:20:22 +03:00
6. Upload the final version to actual pypi :
twine upload dist / * - r pypi
7. Copy the release notes from RELEASE . md to the tag in github once everything is looking hunky - dory .
"""
2019-02-06 02:07:46 +03:00
from io import open
2018-11-15 22:56:10 +03:00
from setuptools import find_packages , setup
2019-12-04 08:52:23 +03:00
# Optional dependency groups, installable via `pip install transformers[<group>]`.
extras = {
    "serving": ["pydantic", "uvicorn", "fastapi"],
    "serving-tf": ["pydantic", "uvicorn", "fastapi", "tensorflow"],
    "serving-torch": ["pydantic", "uvicorn", "fastapi", "torch"],
}
# "all" must be a flat list of requirement strings.  The original
# `[package for package in extras.values()]` iterated over the dict VALUES,
# yielding a list of lists, which pip cannot resolve as an extras entry.
# Flatten one level so every element is a single package specifier.
extras["all"] = [package for group in extras.values() for package in group]
2019-12-04 08:52:23 +03:00
2018-11-15 22:56:10 +03:00
# Package metadata and build configuration.
# NOTE(review): the extracted source carried spurious padding spaces inside the
# string literals (e.g. `" transformers "`, `" 2.3.0 "`), which would corrupt the
# distribution name, version, and requirement specifiers; the intended values
# are restored here.
setup(
    name="transformers",
    version="2.3.0",
    author="Thomas Wolf, Lysandre Debut, Victor Sanh, Julien Chaumond, Google AI Language Team Authors, Open AI team Authors, Facebook AI Authors, Carnegie Mellon University Authors",
    author_email="thomas@huggingface.co",
    description="State-of-the-art Natural Language Processing for TensorFlow 2.0 and PyTorch",
    # The README doubles as the PyPI long description; read as UTF-8 explicitly
    # so the build does not depend on the locale's default encoding.
    long_description=open("README.md", "r", encoding="utf-8").read(),
    long_description_content_type="text/markdown",
    keywords="NLP deep learning transformer pytorch tensorflow BERT GPT GPT-2 google openai CMU",
    license="Apache",
    url="https://github.com/huggingface/transformers",
    # Ship only the library packages; exclude every test package layout.
    packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    install_requires=[
        "numpy",
        "boto3",
        "filelock",
        "requests",
        "tqdm",
        # regex 2019.12.17 is excluded (known-broken release).
        "regex != 2019.12.17",
        "sentencepiece",
        "sacremoses",
    ],
    extras_require=extras,
    scripts=["transformers-cli"],
    # python_requires='>=3.5.0',
    classifiers=[
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python :: 3",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
    ],
)