Mirror of https://github.com/microsoft/azure-cli.git
Automation script for releasing Azure CLI (#3915)
* Automate Releases * First release. * Push modules to Git. * Publish all modules to PyPI. * Create GitHub releases. * Create and Publish packaged release archive. * Create and Publish Docker image. * Create and Publish Debian package. * Wrap password in quotes for special chars * Handle new module. * Add question with location header * Address code review comments * Update release notes. * Fix extra space
This commit is contained in:
Parent
0932b8c53d
Commit
a54befb16c
|
@ -14,12 +14,13 @@ COPY . /azure-cli
|
|||
# pip wheel - required for CLI packaging
|
||||
# jmespath-terminal - we include jpterm as a useful tool
|
||||
RUN pip install --upgrade pip wheel jmespath-terminal
|
||||
# bash gcc openssl-dev libffi-dev musl-dev - dependencies required for CLI
|
||||
# bash gcc make openssl-dev libffi-dev musl-dev - dependencies required for CLI
|
||||
# jq - we include jq as a useful tool
|
||||
# openssh - included for ssh-keygen
|
||||
# ca-certificates
|
||||
# ca-certificates
|
||||
# wget - required for installing jp
|
||||
RUN apk update && apk add bash gcc make openssl-dev libffi-dev musl-dev jq openssh ca-certificates wget openssl git && update-ca-certificates
|
||||
RUN apk update && apk add bash gcc make openssl-dev libffi-dev musl-dev jq openssh \
|
||||
ca-certificates wget openssl git && update-ca-certificates
|
||||
# We also, install jp
|
||||
RUN wget https://github.com/jmespath/jp/releases/download/0.1.2/jp-linux-amd64 -qO /usr/local/bin/jp && chmod +x /usr/local/bin/jp
|
||||
|
||||
|
|
|
@ -83,4 +83,3 @@ if __name__ == '__main__':
|
|||
print('-'*len(n['title']))
|
||||
print(n['content'])
|
||||
print()
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ ENV_PYPI_PASSWORD = os.environ.get('TWINE_PASSWORD')
|
|||
|
||||
assert (ENV_REPO_NAME and ENV_GITHUB_SECRET_TOKEN and ENV_ALLOWED_USERS and ENV_PYPI_REPO and ENV_PYPI_USERNAME and ENV_PYPI_PASSWORD and ENV_GITHUB_API_USER and ENV_GITHUB_API_USER_TOKEN),\
|
||||
"Not all required environment variables have been set. "\
|
||||
"Set ENV_REPO_NAME, GITHUB_SECRET_TOKEN, GITHUB_USER, GITHUB_USER_TOKEN, ALLOWED_USERS, PYPI_REPO, TWINE_USERNAME, TWINE_PASSWORD"
|
||||
"Set REPO_NAME, GITHUB_SECRET_TOKEN, GITHUB_USER, GITHUB_USER_TOKEN, ALLOWED_USERS, PYPI_REPO, TWINE_USERNAME, TWINE_PASSWORD"
|
||||
|
||||
GITHUB_API_AUTH = (ENV_GITHUB_API_USER, ENV_GITHUB_API_USER_TOKEN)
|
||||
GITHUB_API_HEADERS = {'Accept': 'application/vnd.github.v3+json', 'user-agent': 'azure-cli-bot/{}'.format(VERSION)}
|
||||
|
|
|
@ -0,0 +1,16 @@
|
|||
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

FROM python:3.5.2-alpine

# Native build deps (gcc/make + *-dev) are needed to compile wheels on Alpine;
# git/openssh for repo access; ca-certificates + openssl for TLS.
RUN apk update && apk upgrade && \
    apk add --no-cache bash git openssh gcc make \
    openssl-dev libffi-dev musl-dev ca-certificates openssl && update-ca-certificates

# Tooling used by the release scripts: twine (PyPI upload), sh (process
# wrapper), uritemplate.py (GitHub upload URL templates), plus the CLI itself.
RUN pip install --upgrade pip wheel twine requests virtualenv uritemplate.py azure-cli sh

# Copy the release scripts into the image root.
ADD . /

# Print the README, then drop the operator into an interactive shell.
CMD cat README.md; printf "\n\n** starting bash...\n\n"; bash
|
|
@ -0,0 +1,22 @@
|
|||
.. :changelog:
|
||||
|
||||
Release History
|
||||
===============
|
||||
|
||||
|
||||
0.1.1 (2017-07-31)
|
||||
++++++++++++++++++
|
||||
|
||||
* Support releasing a module that hasn't been released before.
|
||||
* Support Docker password with special characters.
|
||||
|
||||
0.1.0 (2017-07-05)
|
||||
++++++++++++++++++
|
||||
|
||||
* First release.
|
||||
* Push modules to Git.
|
||||
* Publish all modules to PyPI.
|
||||
* Create GitHub releases.
|
||||
* Create and Publish packaged release archive.
|
||||
* Create and Publish Docker image.
|
||||
* Create and Publish Debian package.
|
|
@ -0,0 +1,57 @@
|
|||
Automated PyPI and GitHub releases of all CLI modules
|
||||
=====================================================
|
||||
|
||||
Description
|
||||
-----------
|
||||
This is a Docker image that automates releases of all CLI modules to PyPI and then creates GitHub releases for each.
|
||||
The scripts have been tested on Python 3 so it's recommended to run the Docker image.
|
||||
|
||||
How to Build
|
||||
------------
|
||||
```
|
||||
sudo docker build --no-cache -t azuresdk/azure-cli-release-automation:<VERSION> .
|
||||
```
|
||||
|
||||
How to Run
|
||||
----------
|
||||
```
|
||||
sudo docker run -it -e "REPO_NAME=azure/azure-cli" -e "GITHUB_USER=user1" -e "GITHUB_USER_TOKEN=<guid>" \
|
||||
-e "PYPI_REPO=https://test.pypi.org/legacy/" -e "TWINE_USERNAME=<user>" -e "TWINE_PASSWORD=<pass>" \
|
||||
-e "CLI_VERSION=0.0.0a1" -e "AZURE_STORAGE_CONNECTION_STRING=<connectionstring>" \
|
||||
azuresdk/azure-cli-release-automation:<VERSION>
|
||||
```
|
||||
|
||||
Once the container has started, there are several scripts available.
|
||||
Each of them requires its own set of environment variables.
|
||||
These can be set in the initial `docker run` command above or by using `export ENV=VALUE` directly in the running container.
|
||||
|
||||
```
|
||||
python release.py
|
||||
python release-docker.py
|
||||
python release-debian.py
|
||||
```
|
||||
|
||||
Environment Variables
|
||||
---------------------
|
||||
`REPO_NAME` - The name of the GitHub repo (e.g. azure/azure-cli)
|
||||
`GITHUB_USER` - User id of the bot that will post comments and create releases.
|
||||
`GITHUB_USER_TOKEN` - Access token for this user.
|
||||
`PYPI_REPO` - URL to PyPI (e.g. https://test.pypi.org/legacy/ or https://upload.pypi.org/legacy/).
|
||||
`TWINE_USERNAME` - Username to authenticate with PyPI.
|
||||
`TWINE_PASSWORD` - Password to authenticate with PyPI.
|
||||
`CLI_VERSION` - The new version of the CLI (used for packaged releases)
|
||||
`AZURE_STORAGE_CONNECTION_STRING` - The Azure storage connection string to upload release assets
|
||||
|
||||
The `GITHUB_USER` should have the following GitHub OAuth scopes:
|
||||
- repo_deployment (to create GitHub releases and commit to master)
|
||||
|
||||
`CLI_DOWNLOAD_SHA256` - The SHA256 sum of the packaged release (produced by `release.py`).
|
||||
|
||||
`DOCKER_REPO` - The Docker repo to push the image to (e.g. azuresdk/azure-cli-python).
|
||||
`DOCKER_USERNAME` - The Docker username that has push permissions to the above Docker repo.
|
||||
`DOCKER_PASSWORD` - The Docker password for the user.
|
||||
|
||||
`DEBIAN_REPO_ID` - The repository ID to publish the .deb package.
|
||||
`DEBIAN_REPO_URL` - The repository URL to publish the .deb package.
|
||||
`DEBIAN_REPO_USERNAME` - The repository username to publish the .deb package.
|
||||
`DEBIAN_REPO_PASSWORD` - The user password to publish the .deb package.
|
|
@ -0,0 +1,147 @@
|
|||
#!/usr/bin/env python
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# This script is interactive as you need to log in to 'az'.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime
|
||||
from six import StringIO
|
||||
from sh import az, ssh
|
||||
|
||||
|
||||
# Configuration captured from the process environment at import time.
script_env = {}


def add_script_env(name):
    """Capture environment variable *name* (or None if unset) into script_env."""
    value = os.environ.get(name)
    script_env[name] = value
|
||||
|
||||
# Required configuration for the Debian package release, read from env vars.
add_script_env('REPO_NAME')
add_script_env('CLI_VERSION')
add_script_env('CLI_DOWNLOAD_SHA256')
add_script_env('AZURE_STORAGE_CONNECTION_STRING')
add_script_env('DEBIAN_REPO_ID')
add_script_env('DEBIAN_REPO_URL')
add_script_env('DEBIAN_REPO_USERNAME')
add_script_env('DEBIAN_REPO_PASSWORD')

# Fail fast if anything is missing. Fixed: compare to None with 'is not None'
# (PEP 8 E711) rather than '!= None'.
assert all(script_env[n] is not None for n in script_env), \
    "Not all required environment variables have been set. {}".format(script_env)

# Python snippet executed on the build VM to publish the .deb to the apt repo
# service. Filled in with str.format(); doubled braces emit literal dict braces.
REPO_UPLOAD_SCRIPT_TMPL = """
import os, requests
payload = {{'name': 'azure-cli', 'version': '{cli_version}-1', 'repositoryId': '{repo_id}', 'sourceUrl': '{source_url}'}}
r = requests.post('{repo_package_url}', verify=False, auth=('{repo_user}', '{repo_pass}'), json=payload)
print("Status Code")
print(r.status_code)
print("Query with a GET to the following:")
print(r.headers['Location'])
"""
|
||||
|
||||
def print_env_vars():
    """Echo every captured environment variable as 'NAME = value'."""
    for name, value in script_env.items():
        print('{} = {}'.format(name, value))
|
||||
|
||||
|
||||
def print_status(msg=''):
    """Print *msg* prefixed with '-- ' as a lightweight progress marker."""
    print('-- {}'.format(msg))
|
||||
|
||||
def print_heading(heading):
    """Print *heading* between two '=' rules of matching width."""
    rule = '=' * len(heading)
    print('{}\n{}\n{}'.format(rule, heading, rule))
|
||||
|
||||
def give_chance_to_cancel(msg_prefix='', cancel_time_secs=10):
    """Count down before a destructive step so the operator can Ctrl-C.

    msg_prefix       -- text prepended to every countdown line.
    cancel_time_secs -- countdown length in seconds; new keyword argument that
                        generalizes the previously hard-coded 10, default
                        preserves the original behavior.
    """
    msg_tmpl = '{}: Starting in {} seconds.'
    for remaining in range(cancel_time_secs, 0, -1):
        print_status(msg_tmpl.format(msg_prefix, remaining))
        time.sleep(1)
|
||||
|
||||
def main():
    """Build, publish, and smoke-test the Debian package on a throwaway Azure VM.

    Interactive: 'az login' prompts the operator. The VM and resource group are
    deleted (without waiting) at the end after a cancellation countdown.
    """
    print_env_vars()
    # Timestamp suffix keeps resource names unique across runs.
    time_str = datetime.utcnow().strftime('%Y%m%d%H%M%S')
    az(["login"], _out=sys.stdout, _err=sys.stdout)
    resource_group = 'azurecli-release-debian-' + time_str
    vm_name = 'vm-debian-' + time_str
    print_status('Creating resource group.')
    az(['group', 'create', '-l', 'westus', '-n', resource_group], _out=sys.stdout, _err=sys.stdout)
    print_status('Creating VM.')
    az(['vm', 'create', '-g', resource_group, '-n', vm_name, '--generate-ssh-keys', '--authentication-type', 'ssh',
        '--image', 'Canonical:UbuntuServer:14.04.4-LTS:latest', '--admin-username', 'ubuntu'],
       _out=sys.stdout, _err=sys.stdout)
    io = StringIO()
    print_status('Getting VM IP address.')
    az(['vm', 'list-ip-addresses', '--resource-group', resource_group, '--name', vm_name,
        '--query', '[0].virtualMachine.network.publicIpAddresses[0].ipAddress'], _out=io)
    # The JMESPath query output is a quoted JSON string; strip the quotes.
    ip_address = io.getvalue().strip().replace('"', '')
    print_status('VM IP address is {}'.format(ip_address))
    io.close()
    vm_connect_str = "ubuntu@{}".format(ip_address)
    # sh's ssh joins the argument list into one remote shell command, so
    # '&&' and '>>' below are interpreted by the remote shell — TODO confirm.
    my_vm = ssh.bake(['-oStrictHostKeyChecking=no', vm_connect_str])
    print_status('Installing git.')
    my_vm(['sudo', 'apt-get', 'update', '&&', 'sudo', 'apt-get', 'install', '-y', 'git'],
          _out=sys.stdout, _err=sys.stdout)
    io = StringIO()
    my_vm(['mktemp', '-d'], _out=io)
    repo_dir = io.getvalue().strip()
    io.close()
    print_status('Cloning repo.')
    my_vm(['git', 'clone', 'https://github.com/{}'.format(script_env.get('REPO_NAME')), repo_dir], _out=sys.stdout, _err=sys.stdout)
    path_to_debian_build_script = os.path.join(repo_dir, 'packaged_releases', 'debian', 'debian_build.sh')
    path_to_dir_creator = os.path.join(repo_dir, 'packaged_releases', 'debian', 'debian_dir_creator.sh')
    io = StringIO()
    my_vm(['mktemp', '-d'], _out=io)
    build_artifact_dir = io.getvalue().strip()
    io.close()
    print_status('Running debian build scripts.')
    my_vm(['chmod', '+x', path_to_debian_build_script, path_to_dir_creator], _out=sys.stdout, _err=sys.stdout)
    # The build script reads its configuration from exported env vars.
    my_vm(['export', 'CLI_VERSION={}'.format(script_env.get('CLI_VERSION')), '&&',
           'export', 'CLI_DOWNLOAD_SHA256={}'.format(script_env.get('CLI_DOWNLOAD_SHA256')), '&&',
           'export', 'BUILD_ARTIFACT_DIR={}'.format(build_artifact_dir), '&&',
           path_to_debian_build_script, path_to_dir_creator],
          _out=sys.stdout, _err=sys.stdout)
    print_status('Debian build complete.')
    io = StringIO()
    # assumes the artifact dir contains exactly one file (the .deb) — TODO confirm
    my_vm(['ls', build_artifact_dir], _out=io)
    artifact_name = io.getvalue().strip()
    io.close()
    deb_file_path = os.path.join(build_artifact_dir, artifact_name)
    print_status('Installing the .deb on the build machine')
    my_vm(['sudo', 'dpkg', '-i', deb_file_path], _out=sys.stdout, _err=sys.stdout)
    # Upload to Azure Storage
    print_status('Uploading .deb to Azure storage.')
    my_vm(['az', 'storage', 'container', 'create', '--name', 'repos', '--public-access', 'blob',
           '--connection-string', '"{}"'.format(script_env.get('AZURE_STORAGE_CONNECTION_STRING'))],
          _out=sys.stdout, _err=sys.stdout)
    my_vm(['az', 'storage', 'blob', 'upload', '-f', deb_file_path,
           '-n', artifact_name, '-c', 'repos', '--connection-string', '"{}"'.format(script_env.get('AZURE_STORAGE_CONNECTION_STRING'))],
          _out=sys.stdout, _err=sys.stdout)
    io = StringIO()
    my_vm(['az', 'storage', 'blob', 'url', '-n', artifact_name, '-c', 'repos', '--output', 'tsv',
           '--connection-string', '"{}"'.format(script_env.get('AZURE_STORAGE_CONNECTION_STRING'))], _out=io)
    deb_url = io.getvalue().strip()
    io.close()
    print_status('Debian file uploaded to the following URL.')
    print_status(deb_url)
    # Publish to apt service
    my_vm(['wget', '-q', 'https://bootstrap.pypa.io/get-pip.py'], _out=sys.stdout, _err=sys.stdout)
    my_vm(['sudo', 'python', 'get-pip.py'], _out=sys.stdout, _err=sys.stdout)
    my_vm(['sudo', 'pip', 'install', '--upgrade', 'requests'], _out=sys.stdout, _err=sys.stdout)
    upload_script = REPO_UPLOAD_SCRIPT_TMPL.format(cli_version=script_env.get('CLI_VERSION'),
                                                   repo_id=script_env.get('DEBIAN_REPO_ID'),
                                                   source_url=deb_url,
                                                   repo_package_url=script_env.get('DEBIAN_REPO_URL'),
                                                   repo_user=script_env.get('DEBIAN_REPO_USERNAME'),
                                                   repo_pass=script_env.get('DEBIAN_REPO_PASSWORD'))
    # NOTE(review): writing the script via remote 'echo -e "..." >> file' is
    # fragile if the script text contains quotes — verify before reuse.
    my_vm(['echo', '-e', '"{}"'.format(upload_script), '>>', 'repo_upload.py'], _out=sys.stdout, _err=sys.stdout)
    my_vm(['python', 'repo_upload.py'], _out=sys.stdout, _err=sys.stdout)
    print_status('Done. :)')
    give_chance_to_cancel('Delete resource group (in background)')
    az(['group', 'delete', '--name', resource_group, '--yes', '--no-wait'], _out=sys.stdout, _err=sys.stdout)
    print_status('Finished. :)')
|
||||
|
||||
# Script entry point: run the interactive Debian release when executed directly.
if __name__ == '__main__':
    main()
|
|
@ -0,0 +1,105 @@
|
|||
#!/usr/bin/env python
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# This script is interactive as you need to log in to 'az'.
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime
|
||||
from six import StringIO
|
||||
from sh import az, ssh
|
||||
|
||||
# Configuration captured from the process environment at import time.
script_env = {}


def add_script_env(name):
    """Capture environment variable *name* (or None if unset) into script_env."""
    value = os.environ.get(name)
    script_env[name] = value
|
||||
|
||||
# Required configuration for the Docker image release, read from env vars.
add_script_env('REPO_NAME')
add_script_env('CLI_VERSION')
add_script_env('CLI_DOWNLOAD_SHA256')
add_script_env('DOCKER_REPO')
add_script_env('DOCKER_USERNAME')
add_script_env('DOCKER_PASSWORD')

# Fail fast if anything is missing. Fixed: compare to None with 'is not None'
# (PEP 8 E711) rather than '!= None'.
assert all(script_env[n] is not None for n in script_env), \
    "Not all required environment variables have been set. {}".format(script_env)
|
||||
|
||||
def print_env_vars():
    """Echo every captured environment variable as 'NAME = value'."""
    for name, value in script_env.items():
        print('{} = {}'.format(name, value))
|
||||
|
||||
def print_status(msg=''):
    """Print *msg* prefixed with '-- ' as a lightweight progress marker."""
    print('-- {}'.format(msg))
|
||||
|
||||
def print_heading(heading):
    """Print *heading* between two '=' rules of matching width."""
    rule = '=' * len(heading)
    print('{}\n{}\n{}'.format(rule, heading, rule))
|
||||
|
||||
def give_chance_to_cancel(msg_prefix='', cancel_time_secs=10):
    """Count down before a destructive step so the operator can Ctrl-C.

    msg_prefix       -- text prepended to every countdown line.
    cancel_time_secs -- countdown length in seconds; new keyword argument that
                        generalizes the previously hard-coded 10, default
                        preserves the original behavior.
    """
    msg_tmpl = '{}: Starting in {} seconds.'
    for remaining in range(cancel_time_secs, 0, -1):
        print_status(msg_tmpl.format(msg_prefix, remaining))
        time.sleep(1)
|
||||
|
||||
def main():
    """Build and push the CLI Docker image from a throwaway Azure VM.

    Interactive: 'az login' prompts the operator. The VM and resource group are
    deleted (without waiting) at the end after a cancellation countdown.
    """
    print_env_vars()
    # Timestamp suffix keeps resource names unique across runs.
    time_str = datetime.utcnow().strftime('%Y%m%d%H%M%S')
    az(["login"], _out=sys.stdout, _err=sys.stdout)
    resource_group = 'azurecli-release-docker-' + time_str
    vm_name = 'vm-docker-' + time_str
    print_status('Creating resource group.')
    az(['group', 'create', '-l', 'westus', '-n', resource_group], _out=sys.stdout, _err=sys.stdout)
    print_status('Creating VM.')
    az(['vm', 'create', '-g', resource_group, '-n', vm_name, '--generate-ssh-keys', '--authentication-type', 'ssh',
        '--image', 'Canonical:UbuntuServer:16.04-LTS:latest', '--admin-username', 'ubuntu'],
       _out=sys.stdout, _err=sys.stdout)
    io = StringIO()
    print_status('Getting VM IP address.')
    az(['vm', 'list-ip-addresses', '--resource-group', resource_group, '--name', vm_name,
        '--query', '[0].virtualMachine.network.publicIpAddresses[0].ipAddress'], _out=io)
    # The JMESPath query output is a quoted JSON string; strip the quotes.
    ip_address = io.getvalue().strip().replace('"', '')
    print_status('VM IP address is {}'.format(ip_address))
    io.close()
    vm_connect_str = "ubuntu@{}".format(ip_address)
    my_vm = ssh.bake(['-oStrictHostKeyChecking=no', vm_connect_str])
    print_status('Installing Docker.')
    my_vm(['curl', '-sSL', 'https://get.docker.com/', '-o', 'docker_install_script.sh'],
          _out=sys.stdout, _err=sys.stdout)
    my_vm(['sh', 'docker_install_script.sh'], _out=sys.stdout, _err=sys.stdout)
    print_status('Docker installed.')
    io = StringIO()
    my_vm(['mktemp', '-d'], _out=io)
    repo_dir = io.getvalue().strip()
    io.close()
    print_status('Cloning repo.')
    my_vm(['git', 'clone', 'https://github.com/{}'.format(script_env.get('REPO_NAME')), repo_dir], _out=sys.stdout, _err=sys.stdout)
    image_tag = '{}:{}'.format(script_env.get('DOCKER_REPO'), script_env.get('CLI_VERSION'))
    path_to_dockerfile = os.path.join(repo_dir, 'packaged_releases', 'docker', 'Dockerfile')
    path_to_docker_context = os.path.join(repo_dir, 'packaged_releases', 'docker')
    print_status('Running Docker build.')
    my_vm(['sudo', 'docker', 'build', '--no-cache',
           '--build-arg', 'BUILD_DATE="`date -u +"%Y-%m-%dT%H:%M:%SZ"`"',
           '--build-arg', 'CLI_VERSION={}'.format(script_env.get('CLI_VERSION')),
           '--build-arg', 'CLI_DOWNLOAD_SHA256={}'.format(script_env.get('CLI_DOWNLOAD_SHA256')),
           '-f', path_to_dockerfile,
           '-t', image_tag,
           path_to_docker_context], _out=sys.stdout, _err=sys.stdout)
    print_status('Docker build complete.')
    print_status('Running Docker log in.')
    # Password is quoted so special characters survive the remote shell.
    my_vm(['sudo', 'docker', 'login', '--username', script_env.get('DOCKER_USERNAME'), '--password', '"{}"'.format(script_env.get('DOCKER_PASSWORD'))],
          _out=sys.stdout, _err=sys.stdout)
    print_status('Running Docker push.')
    my_vm(['sudo', 'docker', 'push', image_tag], _out=sys.stdout, _err=sys.stdout)
    print_status('Image pushed to Docker Hub.')
    print_status('Done. :)')
    give_chance_to_cancel('Delete resource group (in background)')
    az(['group', 'delete', '--name', resource_group, '--yes', '--no-wait'], _out=sys.stdout, _err=sys.stdout)
    print_status('Finished. :)')
|
||||
|
||||
# Script entry point: run the interactive Docker release when executed directly.
if __name__ == '__main__':
    main()
|
|
@ -0,0 +1,369 @@
|
|||
#!/usr/bin/env python
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# pylint: disable=line-too-long
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import glob
|
||||
import re
|
||||
import time
|
||||
import fileinput
|
||||
import requests
|
||||
import hashlib
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from subprocess import check_call, check_output, CalledProcessError
|
||||
from uritemplate import URITemplate, expand
|
||||
|
||||
|
||||
# Configuration captured from the process environment at import time.
script_env = {}


def add_script_env(name):
    """Capture environment variable *name* (or None if unset) into script_env."""
    value = os.environ.get(name)
    script_env[name] = value
|
||||
|
||||
# Required configuration for the PyPI/GitHub release, read from env vars.
add_script_env('REPO_NAME')
add_script_env('GITHUB_USER')
add_script_env('GITHUB_USER_TOKEN')
add_script_env('PYPI_REPO')
# although not used directly here, twine env vars are needed for releasing
add_script_env('TWINE_USERNAME')
add_script_env('TWINE_PASSWORD')
# the new version of the CLI
add_script_env('CLI_VERSION')
add_script_env('AZURE_STORAGE_CONNECTION_STRING')

# Fail fast if anything is missing. Fixed: compare to None with 'is not None'
# (PEP 8 E711) rather than '!= None'.
assert all(script_env[n] is not None for n in script_env), \
    "Not all required environment variables have been set. {}".format(script_env)
|
||||
|
||||
# Credentials and default headers for every GitHub API request.
GITHUB_API_AUTH = (script_env.get('GITHUB_USER'), script_env.get('GITHUB_USER_TOKEN'))
GITHUB_API_HEADERS = {'Accept': 'application/vnd.github.v3+json', 'user-agent': 'azure-cli-pypi-github-releaser/v1'}

# Filename under which the plain source archive is attached to a GitHub release.
SOURCE_ARCHIVE_NAME = 'source.tar.gz'

# Release body template; the lone '{}' receives the released module's PyPI URL.
GITHUB_RELEASE_BODY_TMPL = """
The module has been published to PyPI.

View HISTORY.rst of the module for a changelog.

{}

Full release notes at https://docs.microsoft.com/en-us/cli/azure/release-notes-azure-cli

"""

# Command-module folders are named 'azure-cli-<name>'.
COMMAND_MODULE_PREFIX = 'azure-cli-'
# Modules released on every run regardless of detected changes.
MODULES_TO_ALWAYS_RELEASE = ['azure-cli']
# Modules never released by this script.
MODULES_TO_SKIP = ['azure-cli-testsdk']
|
||||
|
||||
def give_chance_to_cancel(msg_prefix='', cancel_time_secs=10):
    """Count down before a destructive step so the operator can Ctrl-C.

    msg_prefix       -- text prepended to every countdown line.
    cancel_time_secs -- countdown length in seconds; new keyword argument that
                        generalizes the previously hard-coded 10, default
                        preserves the original behavior.
    """
    msg_tmpl = '{}: Starting in {} seconds.'
    for remaining in range(cancel_time_secs, 0, -1):
        print_status(msg_tmpl.format(msg_prefix, remaining))
        time.sleep(1)
|
||||
|
||||
def print_env_vars():
    """Echo every captured environment variable as 'NAME = value'."""
    for name, value in script_env.items():
        print('{} = {}'.format(name, value))
|
||||
|
||||
def print_status(msg=''):
    """Print *msg* prefixed with '-- ' as a lightweight progress marker."""
    print('-- {}'.format(msg))
|
||||
|
||||
def print_heading(heading):
    """Print *heading* between two '=' rules of matching width."""
    rule = '=' * len(heading)
    print('{}\n{}\n{}'.format(rule, heading, rule))
|
||||
|
||||
def _get_core_modules_paths(repo_working_dir):
|
||||
for path in glob.glob(repo_working_dir + '/src/*/setup.py'):
|
||||
yield os.path.basename(os.path.dirname(path)), os.path.dirname(path)
|
||||
|
||||
def _get_command_modules_paths(repo_working_dir, include_prefix=False):
    """Yield (name, folder) for every command module under src/command_modules.

    When include_prefix is False, the 'azure-cli-' prefix is stripped from name.
    """
    pattern = repo_working_dir + '/src/command_modules/{}*/setup.py'.format(
        COMMAND_MODULE_PREFIX)
    for setup_path in glob.glob(pattern):
        module_dir = os.path.dirname(setup_path)
        module_name = os.path.basename(module_dir)
        if not include_prefix:
            module_name = module_name[len(COMMAND_MODULE_PREFIX):]
        yield module_name, module_dir
|
||||
|
||||
def _get_all_module_paths(repo_working_dir):
    """Return core module paths followed by command module paths (with prefix kept)."""
    core = list(_get_core_modules_paths(repo_working_dir))
    commands = list(_get_command_modules_paths(repo_working_dir, include_prefix=True))
    return core + commands
|
||||
|
||||
def _get_current_module_version(mod_path):
|
||||
mod_version = None
|
||||
with open(os.path.join(mod_path, 'setup.py'), 'r') as fh:
|
||||
version_re = re.compile('VERSION = *')
|
||||
lines = fh.readlines()
|
||||
for _, line in enumerate(lines):
|
||||
if version_re.match(line):
|
||||
mod_version = line.split('=')[1].strip(' "\'').split('+')[0]
|
||||
return mod_version
|
||||
|
||||
def clone_repo(repo_working_dir):
    """Clone the configured GitHub repo into *repo_working_dir* and check out master."""
    check_call(['git', 'clone', 'https://github.com/{}'.format(script_env.get('REPO_NAME')), repo_working_dir])
    check_call(['git', 'checkout', 'master'], cwd=repo_working_dir)
|
||||
|
||||
def should_release_module(mod_name, mod_path, repo_working_dir):
    """Decide whether *mod_name* needs a new release.

    Returns True for modules in MODULES_TO_ALWAYS_RELEASE, False for those in
    MODULES_TO_SKIP, and otherwise True iff commits touch the module (tests
    excluded) since its last release tag — or when the tag lookup fails
    (treated as a new, never-released module).
    """
    if mod_name in MODULES_TO_ALWAYS_RELEASE:
        print_status('We always release {}.'.format(mod_name))
        return True
    if mod_name in MODULES_TO_SKIP:
        print_status('Skipping module {} as in modules to skip list.'.format(mod_name))
        return False
    # Determine if should release based on the current version
    cur_mod_version = _get_current_module_version(mod_path)
    # assumes release tags are named '<module>-<version>' — TODO confirm this
    # matches the tag_name produced by run_create_github_release.
    r_start = '{}-{}'.format(mod_name, cur_mod_version)
    revision_range = "{}..{}".format(r_start, 'HEAD')
    try:
        # ':(exclude)*/tests/*' filters out commits that only touch tests.
        module_changes = check_output(["git", "log", "--pretty=format:* %s", revision_range, "--", mod_path, ":(exclude)*/tests/*"],
                                      cwd=repo_working_dir)
    except CalledProcessError:
        # Maybe the revision_range is invalid if this is a new module.
        return True
    if module_changes:
        print_status('Begin changes in {}'.format(mod_name))
        # check_output returns bytes; decode for display (Python 3 only).
        print(str(module_changes, 'utf-8'))
        print_status('End changes in {}'.format(mod_name))
        return True
    print_status('Skipping module {} as there are no changes.'.format(mod_name))
    return False
|
||||
|
||||
def modify_setuppy_version(mod_name, mod_path):
    """Bump the patch component of VERSION in the module's setup.py.

    Rewrites the 'VERSION = ...' line in place (keeping the '+dev' suffix) and
    returns (old_version, new_version). Raises ValueError when no VERSION line
    exists or no updated content was produced.
    """
    setuppy_path = os.path.join(mod_path, 'setup.py')
    version_pattern = re.compile('VERSION = *')
    with open(setuppy_path, 'r') as handle:
        contents = handle.readlines()
    updated = None
    for position, text in enumerate(contents):
        if not version_pattern.match(text):
            continue
        old_version = text.split('=')[1].strip(' "\'').split('+')[0]
        major, minor, rev = old_version.split('.')
        version = '{}.{}.{}'.format(major, minor, int(rev) + 1)
        contents[position] = 'VERSION = "{}+dev"\n'.format(version)
        updated = contents
        break
    if updated is None:
        raise ValueError('In the setup file {}, version is not found.'.format(setuppy_path))
    if updated:
        with open(setuppy_path, 'w') as handle:
            handle.writelines(updated)
    else:
        raise ValueError('No updated content for setup.py in {}.'.format(mod_name))
    return old_version, version
|
||||
|
||||
def modify_initpy_version(mod_name, mod_path, old_version, new_version):
    """Rewrite __version__ in the module's __init__.py from old to new.

    Only 'azure-cli' and 'azure-cli-core' carry a version in __init__.py.
    Fixed: any other mod_name previously left path_to_init unbound and crashed
    with NameError; it now raises a clear ValueError instead.
    """
    if mod_name == 'azure-cli':
        path_to_init = os.path.join(mod_path, 'azure', 'cli', '__init__.py')
    elif mod_name == 'azure-cli-core':
        path_to_init = os.path.join(mod_path, 'azure', 'cli', 'core', '__init__.py')
    else:
        raise ValueError("No __init__.py version to modify for module '{}'.".format(mod_name))
    # fileinput with inplace=1 redirects stdout into the file being edited.
    for line in fileinput.input(path_to_init, inplace=1):
        if line.startswith('__version__'):
            sys.stdout.write(line.replace(old_version, new_version))
        else:
            sys.stdout.write(line)
|
||||
|
||||
def modify_historyrst(mod_name, mod_path, old_version, new_version):
    """Add a dated changelog heading for *new_version* to the module's HISTORY.rst.

    Two cases:
    - an 'unreleased' heading exists: retitle it to '<new_version> (<date>)';
    - otherwise: insert a new dated heading plus a placeholder bullet between
      the 'Release History' title and the previous version's section.
    """
    historyrst_path = os.path.join(mod_path, 'HISTORY.rst')
    new_history_lines = []
    just_seen_unreleased = False
    contains_unreleased = False
    with open(historyrst_path, 'r') as fq:
        lines = fq.readlines()
    # First pass: does any heading mention 'unreleased'? Lines starting with
    # '* ' are changelog bullets, not headings, so they are excluded.
    for _, line in enumerate(lines):
        if 'unreleased' in line.lower() and not line.startswith('* '):
            contains_unreleased = True
    if contains_unreleased:
        for _, line in enumerate(lines):
            if just_seen_unreleased:
                # skip the line as it's just a heading for the old unreleased section
                just_seen_unreleased = False
                continue
            if 'unreleased' in line.lower() and not line.startswith('* '):
                new_heading = '{} ({})'.format(new_version, datetime.utcnow().strftime('%Y-%m-%d'))
                line = '{}\n{}\n'.format(new_heading, '+' * len(new_heading))
                just_seen_unreleased = True
            new_history_lines.append(line)
    else:
        # NOTE(review): 'begin' and 'end' are only bound when the expected
        # 'Release History' title and a line containing old_version exist;
        # otherwise this raises NameError — TODO confirm HISTORY.rst format
        # is guaranteed by the repo's conventions.
        for index, line in enumerate(lines):
            if line.startswith('Release History'):
                begin = index + 2
            if old_version in line:
                end = index
                break
        new_heading = '{} ({})'.format(new_version, datetime.utcnow().strftime('%Y-%m-%d'))
        line = '{}\n{}\n'.format(new_heading, '+' * len(new_heading))
        release_notes = [line]
        if mod_name in MODULES_TO_ALWAYS_RELEASE:
            release_notes.append('* no changes\n\n')
        else:
            release_notes.append('* minor fixes\n\n')
        new_history_lines = lines[:begin] + release_notes + lines[end:]
    with open(historyrst_path, 'w') as fq:
        fq.writelines(new_history_lines)
|
||||
|
||||
|
||||
def release_module(mod_name, mod_path, repo_working_dir):
    """Bump versions, update the changelog, and commit the release locally.

    Returns (mod_name, commitish, new_version), where commitish is the SHA of
    the release commit. Nothing is pushed here; run_push_to_git does that.
    """
    # Change version in setup.py
    old_version, new_version = modify_setuppy_version(mod_name, mod_path)
    # Need to modify __init__.py for these modules as well
    if mod_name in ['azure-cli', 'azure-cli-core']:
        modify_initpy_version(mod_name, mod_path, old_version, new_version)
    # Modify HISTORY.rst
    modify_historyrst(mod_name, mod_path, old_version, new_version)
    # Create commit with appropriate message.
    commit_message = 'Release {} {}'.format(mod_name, new_version)
    check_call(['git', 'commit', '-am', commit_message], cwd=repo_working_dir)
    commitish = check_output(['git', 'rev-parse', 'HEAD'], cwd=repo_working_dir)
    # check_output returns bytes (Python 3); decode and trim the newline.
    commitish = str(commitish, 'utf-8')
    commitish = commitish.strip()
    return mod_name, commitish, new_version
|
||||
|
||||
|
||||
def install_cli_into_venv():
    """Install the freshly released azure-cli into a new virtualenv.

    Acts as a smoke test that the published packages are installable. Returns
    a list of [name, version] pairs for every installed azure-cli* component,
    parsed from 'pip freeze'.
    """
    venv_dir = tempfile.mkdtemp()
    check_call(['virtualenv', venv_dir])
    path_to_pip = os.path.join(venv_dir, 'bin', 'pip')
    # When releasing to test PyPI, dependencies still resolve from the test index.
    extra_index_url = 'https://testpypi.python.org/simple' if script_env.get('PYPI_REPO') == 'https://test.pypi.org/legacy/' else None
    args = [path_to_pip, 'install', 'azure-cli']
    if extra_index_url:
        args.extend(['--extra-index-url', extra_index_url])
    check_call(args)
    deps = check_output([path_to_pip, 'freeze'])
    # check_output returns bytes (Python 3); decode, then split into lines.
    deps = str(deps, 'utf-8')
    deps = deps.split('\n')
    cli_components = []
    for dep in deps:
        if dep.startswith('azure-cli'):
            # 'name==version' -> [name, version]
            cli_components.append(dep.split('=='))
    return cli_components
|
||||
|
||||
def run_push_to_git():
    """Create release commits for every changed module and push them to master.

    Returns a list of (mod_name, commitish, new_version) tuples, one per
    released module, for the later PyPI / GitHub-release stages.
    """
    repo_working_dir = tempfile.mkdtemp()
    clone_repo(repo_working_dir)
    # NOTE(review): configure_git is not defined in the visible portion of this
    # file — presumably defined elsewhere in the script; verify before running.
    configure_git(repo_working_dir)
    commitish_list = []
    for mod_name, mod_path in _get_all_module_paths(repo_working_dir):
        print_heading(mod_name.upper())
        if should_release_module(mod_name, mod_path, repo_working_dir):
            mod_name, commitish, new_version = release_module(mod_name, mod_path, repo_working_dir)
            commitish_list.append((mod_name, commitish, new_version))
        else:
            print_status('Skipped {}'.format(mod_name))
    # Push all commits to master.
    check_call(['git', 'push', '-f', 'origin', 'master'], cwd=repo_working_dir)
    return commitish_list
|
||||
|
||||
def set_up_cli_repo_dir():
    """Clone the repo into a temp dir and install its automation tooling.

    Returns the path of the working directory.
    """
    working_dir = tempfile.mkdtemp()
    check_call(['git', 'clone', 'https://github.com/{}'.format(script_env.get('REPO_NAME')), working_dir])
    # Editable install of the repo's ./scripts package (release automation).
    check_call(['pip', 'install', '-e', 'scripts'], cwd=working_dir)
    return working_dir
|
||||
|
||||
def publish_to_pypi(working_dir, commitish_list):
    """Publish each released module to PyPI from its release commit.

    For every (mod_name, commitish, _) entry, checks out the release commit and
    runs the repo's release automation, collecting the built artifacts into a
    per-module temp dir. Returns {mod_name: assets_dir}.
    """
    # Publish all in commitish list to PyPI
    assets_dir_map = {}
    for mod_name, commitish, _ in commitish_list:
        assets_dir = tempfile.mkdtemp()
        check_call(['git', 'checkout', commitish], cwd=working_dir)
        check_call(['python', '-m', 'scripts.automation.release.run', '-c', mod_name,
                    '-r', script_env.get('PYPI_REPO'), '--dest', assets_dir], cwd=working_dir)
        assets_dir_map[mod_name] = assets_dir
    # reset back
    check_call(['git', 'checkout', 'master'], cwd=working_dir)
    return assets_dir_map
|
||||
|
||||
def upload_asset(upload_uri_tmpl, filepath, label):
    """Upload one file as a GitHub release asset.

    upload_uri_tmpl -- the RFC 6570 'upload_url' template returned by the
                       GitHub create-release API.
    filepath        -- local path of the asset to upload.
    label           -- display label shown on the release page.
    """
    filename = os.path.basename(filepath)
    upload_url = URITemplate(upload_uri_tmpl).expand(name=filename, label=label)
    # Fixed: copy the shared dict. The original aliased the module-level
    # GITHUB_API_HEADERS and mutated it, leaking
    # 'Content-Type: application/octet-stream' into every later API call.
    headers = dict(GITHUB_API_HEADERS)
    headers['Content-Type'] = 'application/octet-stream'
    with open(filepath, 'rb') as payload:
        requests.post(upload_url, data=payload, auth=GITHUB_API_AUTH, headers=headers)
|
||||
|
||||
def upload_assets_for_github_release(upload_uri_tmpl, component_name, component_version, assets_dir):
    """Upload all release artifacts found in *assets_dir* to a GitHub release.

    The label attached to each asset depends on what kind of file it is:
    the repo source archive, an sdist (.tar.gz) or a wheel (.whl).
    Files of any other type are skipped.
    """
    for entry in os.listdir(assets_dir):
        full_path = os.path.join(assets_dir, entry)
        # NOTE: the source-archive check must come first — it also ends in .tar.gz.
        if entry == SOURCE_ARCHIVE_NAME:
            label = '{} {} source code (.tar.gz)'.format(component_name, component_version)
        elif entry.endswith('.tar.gz'):
            label = '{} {} Source Distribution (.tar.gz)'.format(component_name, component_version)
        elif entry.endswith('.whl'):
            label = '{} {} Python Wheel (.whl)'.format(component_name, component_version)
        else:
            continue
        upload_asset(upload_uri_tmpl, full_path, label)
|
||||
|
||||
def run_create_github_release(commitish_list, assets_dir_map):
    """Create a GitHub release (tag + attached artifacts) for every released module.

    :param commitish_list: iterable of (module_name, commitish, version) tuples.
    :param assets_dir_map: module name -> directory of built artifacts to attach.
    """
    print_heading('Creating GitHub releases')
    # Map the configured PyPI upload endpoint to the public project-page URL
    # template used in the release body; unknown endpoints get no link.
    pypi_project_url_tmpls = {
        'https://upload.pypi.org/legacy/': 'https://pypi.org/project/{}/{}',
        'https://test.pypi.org/legacy/': 'https://test.pypi.org/project/{}/{}',
    }
    url_tmpl = pypi_project_url_tmpls.get(script_env.get('PYPI_REPO'))
    for mod_name, commitish, mod_version in commitish_list:
        print_status('Publishing GitHub release for {} {}'.format(mod_name, mod_version))
        tag_name = '{}-{}'.format(mod_name, mod_version)
        release_name = "{} {}".format(mod_name, mod_version)
        released_pypi_url = url_tmpl.format(mod_name, mod_version) if url_tmpl else ''
        payload = {'tag_name': tag_name, "target_commitish": commitish, "name": release_name,
                   "body": GITHUB_RELEASE_BODY_TMPL.format(released_pypi_url), "prerelease": False}
        r = requests.post('https://api.github.com/repos/{}/releases'.format(script_env.get('REPO_NAME')),
                          json=payload, auth=GITHUB_API_AUTH, headers=GITHUB_API_HEADERS)
        if r.status_code == 201:
            upload_url = r.json()['upload_url']
            upload_assets_for_github_release(upload_url, mod_name, mod_version,
                                             assets_dir_map[mod_name])
            print_status('Published GitHub release for {} {}'.format(mod_name, mod_version))
        else:
            # Include the HTTP status and response body — the original dropped
            # both, making a failed release creation undiagnosable.
            print_status('ERROR: Failed to create GitHub release for {} {} (HTTP {}): {}'.format(
                mod_name, mod_version, r.status_code, r.text))
|
||||
|
||||
def run_create_packaged_release(working_dir):
    """Build the packaged release archive and upload it to Azure Storage.

    Installs the just-released CLI into a fresh venv to obtain the definitive
    component/version list, builds the packaged archive from that list, prints
    its SHA256 checksum, and uploads the archive to the 'releases' blob
    container, printing its public URL.

    :param working_dir: path of the CLI repo clone.
    """
    # After releasing, create a new venv, pip install and verify, then collect
    # the list of components for the packaged-release step.
    print_status('Start installing CLI into venv')
    components_list = install_cli_into_venv()
    print_status('Finished installing CLI into venv')
    archive_dir = tempfile.mkdtemp()
    # Create the packaged release automatically.
    args = ['python', '-m', 'scripts.automation.release.packaged',
            '--version', script_env.get('CLI_VERSION'), '--dest', archive_dir, '--components']
    for name, version in components_list:
        # The tag for this module is slightly different (underscore), so adjust.
        if name == 'azure-cli-command-modules-nspkg':
            name = 'azure-cli-command_modules-nspkg'
        args.append('{}={}'.format(name, version))
    print_status(' '.join(args))
    check_call(args, cwd=working_dir)
    print_status('Created packaged release in dir {}'.format(archive_dir))
    # Compute the SHA256 of the archive in fixed-size chunks so large archives
    # don't have to fit in memory (the original read the whole file at once).
    archive_file_name = os.listdir(archive_dir)[0]
    archive_file_path = os.path.join(archive_dir, archive_file_name)
    sha256 = hashlib.sha256()
    with open(archive_file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            sha256.update(chunk)
    computed_hash = sha256.hexdigest()
    print_status('SHA256 of {} is {}'.format(archive_file_path, computed_hash))
    # Upload the release archive to Azure Storage and print its public URL.
    connection_string = script_env.get('AZURE_STORAGE_CONNECTION_STRING')
    check_call(['az', 'storage', 'blob', 'upload', '--file', archive_file_path,
                '--name', archive_file_name, '--container-name', 'releases',
                '--connection-string', connection_string])
    archive_url = check_output(['az', 'storage', 'blob', 'url', '--name', archive_file_name,
                                '--container-name', 'releases',
                                '--connection-string', connection_string,
                                '--output', 'tsv'])
    archive_url = str(archive_url, 'utf-8').strip()
    print_status('Archive URL is {}'.format(archive_url))
|
||||
|
||||
def configure_git(repo_working_dir):
    """Set the commit identity and an authenticated push URL on the clone."""
    github_user = script_env.get('GITHUB_USER')
    check_call(
        ['git', 'config', 'user.email', '{}@users.noreply.github.com'.format(github_user)],
        cwd=repo_working_dir)
    check_call(['git', 'config', 'user.name', github_user], cwd=repo_working_dir)
    # Embed the user token in the remote URL so pushes need no interactive auth.
    authenticated_remote = 'https://{}:{}@github.com/{}'.format(
        github_user, script_env.get('GITHUB_USER_TOKEN'), script_env.get('REPO_NAME'))
    check_call(['git', 'remote', 'set-url', 'origin', authenticated_remote],
               cwd=repo_working_dir)
|
||||
|
||||
if __name__ == "__main__":
    # Release driver. Each irreversible stage is gated by an interactive
    # confirmation (give_chance_to_cancel) so the operator can abort first.
    print_env_vars()
    give_chance_to_cancel('Create Git release commits')
    # Bump versions, commit and push; returns (module, commitish, version) tuples.
    release_commitish_list = run_push_to_git()
    cli_repo_dir = set_up_cli_repo_dir()
    give_chance_to_cancel('Publish to PyPI')
    # Maps module name -> directory of built artifacts (sdist/wheel).
    release_assets_dir_map = publish_to_pypi(cli_repo_dir, release_commitish_list)
    give_chance_to_cancel('Create GitHub releases and tags')
    run_create_github_release(release_commitish_list, release_assets_dir_map)
    give_chance_to_cancel('Create Packaged Release archive')
    run_create_packaged_release(cli_repo_dir)
    print_status('Done.')
|
Загрузка…
Ссылка в новой задаче