Mirror of https://github.com/microsoft/asconnect.git

v1.0.0 of asconnect

Parent: 9196221c3b
Commit: 7d270c97dc
@ -1,350 +1,8 @@
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
##
## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore

# User-specific files
*.rsuser
*.suo
*.user
*.userosscache
*.sln.docstates

# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs

# Mono auto generated files
mono_crash.*

# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
[Aa][Rr][Mm]/
[Aa][Rr][Mm]64/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
[Ll]ogs/

# Visual Studio 2015/2017 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/

# Visual Studio 2017 auto generated files
Generated\ Files/

# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*

# NUnit
*.VisualState.xml
TestResult.xml
nunit-*.xml

# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c

# Benchmark Results
BenchmarkDotNet.Artifacts/

# .NET Core
project.lock.json
project.fragment.lock.json
artifacts/

# StyleCop
StyleCopReport.xml

# Files built by Visual Studio
*_i.c
*_p.c
*_h.h
*.ilk
*.meta
*.obj
*.iobj
*.pch
*.pdb
*.ipdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*_wpftmp.csproj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc

# Chutzpah Test files
_Chutzpah*

# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb

# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap

# Visual Studio Trace Files
*.e2e

# TFS 2012 Local Workspace
$tf/

# Guidance Automation Toolkit
*.gpState

# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user

# TeamCity is a build add-in
_TeamCity*

# DotCover is a Code Coverage Tool
*.dotCover

# AxoCover is a Code Coverage Tool
.axoCover/*
!.axoCover/settings.json

# Visual Studio code coverage results
*.coverage
*.coveragexml

# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*

# MightyMoose
*.mm.*
AutoTest.Net/

# Web workbench (sass)
.sass-cache/

# Installshield output folder
[Ee]xpress/

# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html

# Click-Once directory
publish/

# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# Note: Comment the next line if you want to checkin your web deploy settings,
# but database connection strings (with potential passwords) will be unencrypted
*.pubxml
*.publishproj

# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/

# NuGet Packages
*.nupkg
# NuGet Symbol Packages
*.snupkg
# The packages folder can be ignored because of Package Restore
**/[Pp]ackages/*
# except build/, which is used as an MSBuild target.
!**/[Pp]ackages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/[Pp]ackages/repositories.config
# NuGet v3's project.json files produces more ignorable files
*.nuget.props
*.nuget.targets

# Microsoft Azure Build Output
csx/
*.build.csdef

# Microsoft Azure Emulator
ecf/
rcf/

# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
*.appx
*.appxbundle
*.appxupload

# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!?*.[Cc]ache/

# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
orleans.codegen.cs

# Including strong name files can present a security risk
# (https://github.com/github/gitignore/pull/2483#issue-259490424)
#*.snk

# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/

# RIA/Silverlight projects
Generated_Code/

# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
ServiceFabricBackup/
*.rptproj.bak

# SQL Server files
*.mdf
*.ldf
*.ndf

# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
*.rptproj.rsuser
*- [Bb]ackup.rdl
*- [Bb]ackup ([0-9]).rdl
*- [Bb]ackup ([0-9][0-9]).rdl

# Microsoft Fakes
FakesAssemblies/

# GhostDoc plugin setting file
*.GhostDoc.xml

# Node.js Tools for Visual Studio
.ntvs_analysis.dat
node_modules/

# Visual Studio 6 build log
*.plg

# Visual Studio 6 workspace options file
*.opt

# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
*.vbw

# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions

# Paket dependency manager
.paket/paket.exe
paket-files/

# FAKE - F# Make
.fake/

# CodeRush personal settings
.cr/personal

# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc

# Cake - Uncomment if you are using it
# tools/**
# !tools/packages.config

# Tabs Studio
*.tss

# Telerik's JustMock configuration file
*.jmconfig

# BizTalk build output
*.btp.cs
*.btm.cs
*.odx.cs
*.xsd.cs

# OpenCover UI analysis results
OpenCover/

# Azure Stream Analytics local run output
ASALocalRun/

# MSBuild Binary and Structured Log
*.binlog

# NVidia Nsight GPU debugger configuration file
*.nvuser

# MFractors (Xamarin productivity tool) working folder
.mfractor/

# Local History for Visual Studio
.localhistory/

# BeatPulse healthcheck temp database
healthchecksdb

# Backup folder for Package Reference Convert tool in Visual Studio 2017
MigrationBackup/

# Ionide (cross platform F# VS Code tools) working folder
.ionide/
**/__pycache__/**
**/.mypy_cache/**
asconnect.egg-info
dist
venv
**/.pytest_cache/**
.env
tests/test_data.json
@ -0,0 +1,16 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Current File",
            "type": "python",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "justMyCode": false
        }
    ]
}
@ -0,0 +1,43 @@
{
    "editor.tabSize": 4,
    "editor.insertSpaces": true,
    // Configure glob patterns for excluding files and folders.
    "files.exclude": {
        "**/*.pyc": true,
        "**/.vscode/**": true,
        "**/__pycache__/**": true,
        "**/.pytest_cache/**": true,
        "**/.mypy_cache/**": true,
        "venv/**": true
    },
    "python.pythonPath": "${workspaceFolder}/venv/bin/python3",
    "python.linting.enabled": true,
    "python.linting.pylintEnabled": true,
    "python.linting.pylintPath": "${workspaceFolder}/venv/bin/pylint",
    "python.linting.pylintArgs": [
        "--rcfile=${workspaceFolder}/pylintrc",
        "--extension-pkg-whitelist=mypy",
        "--init-hook",
        "import sys; sys.path.insert(0, '${workspaceFolder}/asconnect')"
    ],
    "python.linting.mypyEnabled": true,
    "python.linting.mypyPath": "${workspaceFolder}/venv/bin/mypy",
    "python.linting.mypyArgs": [
        "--config-file=${workspaceFolder}/mypy.ini"
    ],
    "python.jediEnabled": true,
    "editor.formatOnSave": true,
    "python.formatting.provider": "black",
    "python.formatting.blackPath": "${workspaceFolder}/venv/bin/black",
    "python.formatting.blackArgs": [
        "--line-length",
        "100"
    ],
    "python.testing.pytestArgs": [
        "tests"
    ],
    "python.testing.unittestEnabled": false,
    "python.testing.nosetestsEnabled": false,
    "python.testing.pytestEnabled": true,
    "python.languageServer": "Jedi"
}
@ -0,0 +1,9 @@
# Microsoft Open Source Code of Conduct

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).

Resources:

- [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/)
- [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
- Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns
@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -0,0 +1,90 @@
# asconnect

asconnect is a Python wrapper around the [Apple App Store Connect REST APIs](https://developer.apple.com/documentation/appstoreconnectapi).

This wrapper does not cover every API, but does cover the basics, including:

* Uploading a build
* Creating a new TestFlight version
* Setting TestFlight review information
* Creating a new app store version
* Setting the app review information
* Submitting for app review

## Getting Started

### Installation

The package is available on PyPI, so you can run `pip install asconnect` to get the latest version.

### Creating a client

To begin, you need to [generate a key](https://developer.apple.com/documentation/appstoreconnectapi/creating_api_keys_for_app_store_connect_api), then get its ID, the contents of the key itself, and the issuer ID.

Once you have those, you can create a new client by running:

```python
client = asconnect.Client(key_id="...", key_contents="...", issuer_id="...")
```

### Getting your App

Most operations require an app identifier. This is not the same as the bundle ID you choose, but is an ID generated by Apple. The easiest way to get this is to run this code:

```python
app = client.app.get_from_bundle_id("com.example.my_bundle_id")
```

### Uploading a Build

Uploading a build isn't technically part of the App Store Connect APIs, but a wrapper around iTMSTransporter is included to make things as easy as possible. Let's upload a build for your app:

```python
asconnect.itms.upload_build(
    ipa_path="/path/to/the/app.ipa",
    bundle_id="com.example.my_bundle_id",
    app_id=app.identifier,  # Taken from the call above
    username=username,  # ITC username
    password=password,  # ITC password
)
```

And if you want to wait for your build to finish processing:

```python
build = client.build.wait_for_build_to_process("com.example.my_bundle_id", build_number)
```

`build_number` is the build number you gave your build when you created it. It's used by the App Store to identify the build.

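### TestFlight Submission

The TestFlight features listed at the top of this README are exposed through `client.beta_review`. Here is a minimal sketch of pushing the processed build out to testers; it reuses the `app` and `build` objects from the steps above, and the contact details, locale, and release notes are placeholder values to swap for your own.

```python
# Set the TestFlight review contact details (placeholder values)
client.beta_review.set_beta_app_review_details(
    app_id=app.identifier,
    contact_email="contact@example.com",
    contact_first_name="Jane",
    contact_last_name="Doe",
    contact_phone="555-0100",
)

# Set the "What's New" text shown to testers for this build
client.beta_review.set_whats_new_for_build(
    build.identifier,
    {"en-US": "Bug fixes and performance improvements."},
)

# Add the build to your beta groups and submit it for beta review
groups = list(client.beta_review.get_beta_groups(app.identifier))
client.beta_review.set_beta_groups_on_build(build.identifier, groups)
client.beta_review.submit_for_beta_review(build.identifier)
```
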
### App Store Submission

Let's take that build, create a new app store version, and submit it:

```python
# Create a new version
version = client.app.create_new_version(version="1.2.3", app_id=app.identifier)

# Set the build for that version
client.version.set_build(version_id=version.identifier, build_id=build.identifier)

# Submit for review
client.version.submit_for_review(version_id=version.identifier)
```

It's that easy. Most of the time, at least. If you don't have a previous version to inherit information from, you'll need to do things like set screenshots, reviewer info, and other metadata first. All of this is possible through this library; a rough sketch of setting the store listing metadata follows.

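For example, the store listing for the version being prepared can be filled in through `client.app_info`. This is only a rough sketch built from the client methods in this library: it assumes the returned models expose an `identifier` attribute (as the other models here do), picking the editable app info record is glossed over, and all of the text values are placeholders.

```python
# Fetch the app info records; one is live, the other is the one being prepared
app_infos = client.app_info.get_app_info(app_id=app.identifier)
editable_info = app_infos[0]  # Choosing the editable record is up to you

# Update the store listing for a locale (placeholder values)
for localization in client.app_info.get_localizations(
    app_info_id=editable_info.identifier, locale="en-US"
):
    client.app_info.set_localization_properties(
        localization_id=localization.identifier,
        subtitle="A short subtitle",
        privacy_policy_url="https://example.com/privacy",
    )
```

Version-specific copy such as the description, keywords, and "What's New" text is set in the same way through `client.app_info.set_localization_version_properties`.
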
# Contributing

This project welcomes contributions and suggestions. Most contributions require you to agree to a
Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us
the rights to use your contribution. For details, visit https://cla.opensource.microsoft.com.

When you submit a pull request, a CLA bot will automatically determine whether you need to provide
a CLA and decorate the PR appropriately (e.g., status check, comment). Simply follow the instructions
provided by the bot. You will only need to do this once across all repos using our CLA.

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or
contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
@ -0,0 +1,41 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.5 BLOCK -->

## Security

Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).

If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.

## Reporting Security Issues

**Please do not report security vulnerabilities through public GitHub issues.**

Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).

If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).

You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).

Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:

* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue

This information will help us triage your report more quickly.

If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.

## Preferred Languages

We prefer all communications to be in English.

## Policy

Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).

<!-- END MICROSOFT SECURITY.MD BLOCK -->
@ -0,0 +1,12 @@
#!/usr/bin/env python3

"""Wrapper around the Apple App Store Connect APIs."""

# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

from asconnect.client import Client
from asconnect.models import App
from asconnect.models import Build

from asconnect.itms import upload_build
@ -0,0 +1,96 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
from asconnect.models import App, AppStoreVersion, Platform
|
||||
|
||||
|
||||
class AppClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("app")
|
||||
|
||||
def get_all(self, url: Optional[str] = None,) -> Iterator[App]:
|
||||
"""Get all apps.
|
||||
|
||||
:param Optional[str] url: The URL to use (will be generated if not supplied)
|
||||
|
||||
:returns: A list of apps
|
||||
"""
|
||||
|
||||
if url is None:
    url = self.http_client.generate_url("apps")
|
||||
yield from self.http_client.get(url=url, data_type=List[App])
|
||||
|
||||
def get_from_bundle_id(self, bundle_id: str) -> Optional[App]:
|
||||
"""Get a particular app.
|
||||
|
||||
:param bundle_id: The bundle ID of the app to get
|
||||
|
||||
:returns: The app if found, None otherwise
|
||||
"""
|
||||
for app in self.get_all():
|
||||
if app.bundle_id == bundle_id:
|
||||
return app
|
||||
return None
|
||||
|
||||
def create_new_version(
|
||||
self,
|
||||
*,
|
||||
version: str,
|
||||
app_id: str,
|
||||
platform: Platform = Platform.ios,
|
||||
copyright_text: Optional[str] = None,
|
||||
uses_idfa: Optional[bool] = None,
|
||||
) -> AppStoreVersion:
|
||||
"""Create a new version on the app store.
|
||||
|
||||
:param version: The version to create
|
||||
:param app_id: The ID of the app
|
||||
:param platform: The platform this app is (defaults to iOS)
|
||||
:param copyright_text: The copyright string to use
|
||||
:param uses_idfa: Set to True if this app uses the advertising ID, false otherwise
|
||||
|
||||
:raises AppStoreConnectError: On a failure response
|
||||
|
||||
:returns: An AppStoreVersion
|
||||
"""
|
||||
|
||||
attributes = {
|
||||
"platform": platform.value,
|
||||
"versionString": version,
|
||||
"releaseType": "MANUAL", # TODO This should support scheduling
|
||||
}
|
||||
|
||||
if copyright_text:
|
||||
attributes["copyright"] = copyright_text
|
||||
|
||||
if uses_idfa is not None:
|
||||
attributes["usesIdfa"] = uses_idfa
|
||||
|
||||
return self.http_client.post(
|
||||
endpoint="appStoreVersions",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": attributes,
|
||||
"type": "appStoreVersions",
|
||||
"relationships": {"app": {"data": {"type": "apps", "id": app_id,}}},
|
||||
}
|
||||
},
|
||||
data_type=AppStoreVersion,
|
||||
)
|
|
@ -0,0 +1,169 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
from asconnect.models import AppInfoLocalization, AppInfo, AppStoreVersionLocalization
|
||||
from asconnect.utilities import update_query_parameters
|
||||
|
||||
|
||||
class AppInfoClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("appinfo")
|
||||
|
||||
def get_app_info(self, *, app_id: str) -> List[AppInfo]:
|
||||
"""Get the app info for an app.
|
||||
|
||||
If there are two, one will be marked as ready for sale, the other will
|
||||
be one that is being prepared.
|
||||
|
||||
:param app_id: The app ID to get the info for
|
||||
|
||||
:returns: A list of AppInfo objects
|
||||
"""
|
||||
url = self.http_client.generate_url(f"apps/{app_id}/appInfos")
|
||||
|
||||
return list(self.http_client.get(url=url, data_type=List[AppInfo]))
|
||||
|
||||
def get_localizations(
|
||||
self, *, app_info_id: str, locale: Optional[str] = None,
|
||||
) -> Iterator[AppInfoLocalization]:
|
||||
"""Get the app info for an app.
|
||||
|
||||
:param app_info_id: The app info ID to get the localized info for
|
||||
:param locale: The locale to filter on (if any)
|
||||
|
||||
:returns: An iterator to AppInfoLocalization
|
||||
"""
|
||||
url = self.http_client.generate_url(f"appInfos/{app_info_id}/appInfoLocalizations")
|
||||
|
||||
query_parameters = {}
|
||||
|
||||
if locale:
|
||||
query_parameters["filter[locale]"] = locale
|
||||
|
||||
url = update_query_parameters(url, query_parameters)
|
||||
|
||||
yield from self.http_client.get(url=url, data_type=List[AppInfoLocalization])
|
||||
|
||||
def set_localization_properties(
|
||||
self,
|
||||
*,
|
||||
localization_id: str,
|
||||
name: Optional[str] = None,
|
||||
privacy_policy_text: Optional[str] = None,
|
||||
privacy_policy_url: Optional[str] = None,
|
||||
subtitle: Optional[str] = None,
|
||||
) -> AppInfoLocalization:
|
||||
"""Set the properties on an app info localization
|
||||
|
||||
Any left as None will be ignored.
|
||||
|
||||
:param localization_id: The ID of the localization to patch
|
||||
:param name: The name of the app
|
||||
:param privacy_policy_text: The text of the privacy policy
|
||||
:param privacy_policy_url: The URL of the privacy policy
|
||||
:param subtitle: The sub-title for the app
|
||||
|
||||
:returns: The new updated app info localization
|
||||
"""
|
||||
|
||||
attributes = {}
|
||||
|
||||
if name:
|
||||
attributes["name"] = name
|
||||
|
||||
if privacy_policy_text:
|
||||
attributes["privacyPolicyText"] = privacy_policy_text
|
||||
|
||||
if privacy_policy_url:
|
||||
attributes["privacyPolicyUrl"] = privacy_policy_url
|
||||
|
||||
if subtitle:
|
||||
attributes["subtitle"] = subtitle
|
||||
|
||||
return self.http_client.patch(
|
||||
endpoint=f"appInfoLocalizations/{localization_id}",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": attributes,
|
||||
"type": "appInfoLocalizations",
|
||||
"id": localization_id,
|
||||
}
|
||||
},
|
||||
data_type=AppInfoLocalization,
|
||||
)
|
||||
|
||||
def set_localization_version_properties(
|
||||
self,
|
||||
*,
|
||||
version_localization_id: str,
|
||||
description: Optional[str] = None,
|
||||
keywords: Optional[str] = None,
|
||||
marketing_url: Optional[str] = None,
|
||||
promotional_text: Optional[str] = None,
|
||||
support_url: Optional[str] = None,
|
||||
whats_new: Optional[str] = None,
|
||||
) -> AppStoreVersionLocalization:
|
||||
"""Set the properties on an app version localization
|
||||
|
||||
Any left as None will be ignored.
|
||||
|
||||
:param version_localization_id: The ID of the localization to patch
|
||||
:param description: The description for the app store
|
||||
:param keywords: The keywords for the app (comma separated)
|
||||
:param marketing_url: The marketing URL
|
||||
:param promotional_text: Any promotional text (can be set after submission)
|
||||
:param support_url: The support URL
|
||||
:param whats_new: The What's New text for the version (release notes)
|
||||
|
||||
:returns: The new updated app version localization
|
||||
"""
|
||||
|
||||
attributes = {}
|
||||
|
||||
if description:
|
||||
attributes["description"] = description
|
||||
|
||||
if keywords:
|
||||
attributes["keywords"] = keywords
|
||||
|
||||
if marketing_url:
|
||||
attributes["marketingUrl"] = marketing_url
|
||||
|
||||
if promotional_text:
|
||||
attributes["promotionalText"] = promotional_text
|
||||
|
||||
if support_url:
|
||||
attributes["supportUrl"] = support_url
|
||||
|
||||
if whats_new:
|
||||
attributes["whatsNew"] = whats_new
|
||||
|
||||
return self.http_client.patch(
|
||||
endpoint=f"appStoreVersionLocalizations/{version_localization_id}",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": attributes,
|
||||
"type": "appStoreVersionLocalizations",
|
||||
"id": version_localization_id,
|
||||
}
|
||||
},
|
||||
data_type=AppStoreVersionLocalization,
|
||||
)
|
|
@ -0,0 +1,230 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, Iterator, List, Optional
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
|
||||
from asconnect.models import (
|
||||
BetaAppLocalization,
|
||||
BetaAppReviewDetail,
|
||||
BetaBuildLocalization,
|
||||
BetaGroup,
|
||||
)
|
||||
|
||||
|
||||
class BetaReviewClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("beta_review")
|
||||
|
||||
def set_beta_app_review_details(
|
||||
self,
|
||||
*,
|
||||
app_id: str,
|
||||
contact_email: str,
|
||||
contact_first_name: str,
|
||||
contact_last_name: str,
|
||||
contact_phone: str,
|
||||
demo_account_name: Optional[str] = None,
|
||||
demo_account_password: Optional[str] = None,
|
||||
demo_account_required: Optional[bool] = None,
|
||||
notes: Optional[str] = None,
|
||||
) -> BetaAppReviewDetail:
|
||||
"""Set the Beta app review details.
|
||||
|
||||
:param str app_id: The Apple ID for the app
|
||||
:param str contact_email: The email for the app review contact
|
||||
:param str contact_first_name: The first name for the app review contact
|
||||
:param str contact_last_name: The last name for the app review contact
|
||||
:param str contact_phone: The phone number for the app review contact
|
||||
:param Optional[str] demo_account_name: The username for the demo account
|
||||
:param Optional[str] demo_account_password: The password for the demo account
|
||||
:param Optional[bool] demo_account_required: Set to True to mark the demo account as required
|
||||
:param Optional[str] notes: Any notes for the reviewer
|
||||
|
||||
:returns: The updated beta app review details
|
||||
"""
|
||||
attributes: Dict[str, Any] = {
|
||||
"contactEmail": contact_email,
|
||||
"contactFirstName": contact_first_name,
|
||||
"contactLastName": contact_last_name,
|
||||
"contactPhone": contact_phone,
|
||||
}
|
||||
|
||||
if demo_account_name is not None:
|
||||
attributes["demoAccountName"] = demo_account_name
|
||||
|
||||
if demo_account_password is not None:
|
||||
attributes["demoAccountPassword"] = demo_account_password
|
||||
|
||||
if demo_account_required is not None:
|
||||
attributes["demoAccountRequired"] = demo_account_required
|
||||
|
||||
if notes is not None:
|
||||
attributes["notes"] = notes
|
||||
|
||||
body = {"data": {"attributes": attributes, "id": app_id, "type": "betaAppReviewDetails"}}
|
||||
|
||||
return self.http_client.patch(
|
||||
endpoint=f"betaAppReviewDetails/{app_id}", data=body, data_type=BetaAppReviewDetail
|
||||
)
|
||||
|
||||
def get_beta_app_localizations(self, app_id: str) -> Iterator[BetaAppLocalization]:
|
||||
"""Get the beta app localizations.
|
||||
|
||||
:param app_id: The apple identifier for the app to get the localizations for
|
||||
|
||||
:returns: An iterator to the beta app localizations
|
||||
"""
|
||||
url = self.http_client.generate_url(f"apps/{app_id}/betaAppLocalizations")
|
||||
yield from self.http_client.get(url=url, data_type=List[BetaAppLocalization])
|
||||
|
||||
def get_beta_build_localizations(self, build_id: str) -> Iterator[BetaBuildLocalization]:
|
||||
"""Get the beta app localizations.
|
||||
|
||||
:param build_id: The identifier for the build to get the localizations for
|
||||
|
||||
:returns: An iterator to the beta build localizations
|
||||
"""
|
||||
url = self.http_client.generate_url(f"betaBuildLocalizations?filter[build]={build_id}")
|
||||
yield from self.http_client.get(url=url, data_type=List[BetaBuildLocalization])
|
||||
|
||||
def set_beta_app_localizations(
|
||||
self, app_id: str, localizations: Dict[str, Dict[str, str]]
|
||||
) -> None:
|
||||
"""Set the app localizations.
|
||||
|
||||
:param app_id: The apple identifier for the app to set the localizations for
|
||||
:param localizations: A dictionary of language codes to localization attributes
|
||||
"""
|
||||
|
||||
existing_localizations = {}
|
||||
|
||||
for localization in self.get_beta_app_localizations(app_id):
|
||||
existing_localizations[localization.attributes.locale] = localization
|
||||
|
||||
for language_code, language_info in localizations.items():
|
||||
existing_localization = existing_localizations.get(language_code)
|
||||
|
||||
if existing_localization:
|
||||
self.http_client.patch(
|
||||
endpoint=f"betaAppLocalizations/{existing_localization.identifier}",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": language_info,
|
||||
"id": existing_localization.identifier,
|
||||
"type": "betaAppLocalizations",
|
||||
}
|
||||
},
|
||||
data_type=BetaAppLocalization,
|
||||
)
|
||||
else:
|
||||
language_info["locale"] = language_code
|
||||
self.http_client.post(
|
||||
endpoint="betaAppLocalizations",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": language_info,
|
||||
"type": "betaAppLocalizations",
|
||||
"relationships": {"app": {"data": {"type": "apps", "id": app_id,}}},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def set_whats_new_for_build(self, build_id: str, localizations: Dict[str, str]) -> None:
|
||||
"""Set the whats new for a build.
|
||||
|
||||
:param build_id: The identifier of the build to set the localizations for
|
||||
:param localizations: A dictionary of language codes to localization info
|
||||
"""
|
||||
|
||||
existing_localizations = {}
|
||||
|
||||
for localization in self.get_beta_build_localizations(build_id):
|
||||
existing_localizations[localization.attributes.locale] = localization
|
||||
|
||||
for language_code, whats_new in localizations.items():
|
||||
attributes = {"whatsNew": whats_new}
|
||||
|
||||
existing_localization = existing_localizations.get(language_code)
|
||||
|
||||
if existing_localization:
|
||||
self.http_client.patch(
|
||||
endpoint=f"betaBuildLocalizations/{existing_localization.identifier}",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": attributes,
|
||||
"id": existing_localization.identifier,
|
||||
"type": "betaBuildLocalizations",
|
||||
}
|
||||
},
|
||||
data_type=BetaBuildLocalization,
|
||||
)
|
||||
else:
|
||||
attributes["locale"] = language_code
|
||||
self.http_client.post(
|
||||
endpoint="betaBuildLocalizations",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": attributes,
|
||||
"type": "betaBuildLocalizations",
|
||||
"relationships": {
|
||||
"build": {"data": {"type": "builds", "id": build_id,}}
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def get_beta_groups(self, app_id: str) -> Iterator[BetaGroup]:
|
||||
"""Get the beta groups
|
||||
|
||||
:param app_id: The ID of the app to filter on
|
||||
|
||||
:returns: An iterator to the beta groups
|
||||
"""
|
||||
url = self.http_client.generate_url(f"betaGroups?filter[app]={app_id}")
|
||||
yield from self.http_client.get(url=url, data_type=List[BetaGroup])
|
||||
|
||||
def set_beta_groups_on_build(self, build_id: str, beta_groups: List[BetaGroup]) -> None:
|
||||
"""Set the Beta groups on a build.
|
||||
|
||||
:param build_id: The build ID for the build to set the groups on
|
||||
:param beta_groups: The groups to add
|
||||
"""
|
||||
data = []
|
||||
for beta_group in beta_groups:
|
||||
data.append({"type": "betaGroups", "id": beta_group.identifier})
|
||||
|
||||
body = {"data": data}
|
||||
|
||||
self.http_client.post(endpoint=f"builds/{build_id}/relationships/betaGroups", data=body)
|
||||
|
||||
def submit_for_beta_review(self, build_id: str) -> None:
|
||||
"""Submit a build for beta review
|
||||
|
||||
:param build_id: The build ID for the build to submit for beta review
|
||||
"""
|
||||
|
||||
body = {
|
||||
"data": {
|
||||
"type": "betaAppReviewSubmissions",
|
||||
"relationships": {"build": {"data": {"id": build_id, "type": "builds",}}},
|
||||
}
|
||||
}
|
||||
|
||||
self.http_client.post(endpoint="betaAppReviewSubmissions", data=body)
|
|
@ -0,0 +1,150 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
import time
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
|
||||
from asconnect.models import App, Build, BuildBetaDetail
|
||||
from asconnect.sorting import BuildsSort
|
||||
from asconnect.utilities import update_query_parameters, next_or_none
|
||||
|
||||
|
||||
class BuildClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("build")
|
||||
|
||||
def get_builds(
|
||||
self,
|
||||
*,
|
||||
url: Optional[str] = None,
|
||||
sort: Optional[BuildsSort] = None,
|
||||
build_number: Optional[str] = None,
|
||||
version: Optional[str] = None,
|
||||
app_id: Optional[str] = None,
|
||||
) -> Iterator[Build]:
|
||||
"""Get all builds.
|
||||
|
||||
:param Optional[str] url: The URL to use (will be generated if not supplied)
|
||||
:param Optional[BuildsSort] sort: The sort option to use
|
||||
:param Optional[str] build_number: Filter to just this build number
|
||||
:param Optional[str] version: Filter to just this version
|
||||
:param Optional[str] app_id: Filter to just this app
|
||||
|
||||
:returns: A list of builds
|
||||
"""
|
||||
if url is None:
|
||||
url = self.http_client.generate_url("builds")
|
||||
|
||||
query_parameters = {}
|
||||
|
||||
if sort:
|
||||
query_parameters["sort"] = sort.value
|
||||
|
||||
if build_number:
|
||||
query_parameters["filter[version]"] = build_number
|
||||
|
||||
if app_id:
|
||||
query_parameters["filter[app]"] = app_id
|
||||
|
||||
if version:
|
||||
query_parameters["filter[preReleaseVersion.version]"] = version
|
||||
|
||||
url = update_query_parameters(url, query_parameters)
|
||||
|
||||
yield from self.http_client.get(url=url, data_type=List[Build])
|
||||
|
||||
def get_build_from_identifier(self, identifier: str) -> Optional[Build]:
|
||||
"""Get a build from its identifier
|
||||
|
||||
:param identifier: The unique identifier for the build (_not_ the build number)
|
||||
|
||||
:returns: A build if found, None otherwise
|
||||
"""
|
||||
url = self.http_client.generate_url(f"builds/{identifier}")
|
||||
|
||||
return next_or_none(self.http_client.get(url=url, data_type=Build))
|
||||
|
||||
def get_from_build_number(self, bundle_id: str, build_number: str) -> Optional[Build]:
|
||||
"""Get a build from its build number.
|
||||
|
||||
:param bundle_id: The bundle ID of the app
|
||||
:param build_number: The build number for the build to get
|
||||
|
||||
:returns: The build if found, None otherwise
|
||||
"""
|
||||
for build in self.get_builds(build_number=build_number):
|
||||
# TODO use app id directly for this
|
||||
assert build.relationships is not None
|
||||
related_link = build.relationships["app"].links.related
|
||||
assert related_link is not None
|
||||
app = next_or_none(self.http_client.get(url=related_link, data_type=App))
|
||||
|
||||
if not app:
|
||||
break
|
||||
|
||||
if app.bundle_id == bundle_id:
|
||||
return build
|
||||
|
||||
return None
|
||||
|
||||
def wait_for_build_to_process(
|
||||
self, bundle_id: str, build_number: str, wait_time: int = 30
|
||||
) -> Build:
|
||||
"""Wait for a build to finish processing.
|
||||
|
||||
:param bundle_id: The bundle ID of the app
|
||||
:param build_number: The build number for the build to wait for
|
||||
:param wait_time: The time to wait between checks for processing completion in seconds
|
||||
|
||||
:returns: The build when finished processing
|
||||
"""
|
||||
build = None
|
||||
|
||||
while True:
|
||||
self.log.info("Waiting for build to appear...")
|
||||
build = self.get_from_build_number(bundle_id, build_number)
|
||||
if build is not None:
|
||||
break
|
||||
time.sleep(wait_time)
|
||||
|
||||
if build.attributes.processing_state != "PROCESSING":
|
||||
return build
|
||||
|
||||
while True:
|
||||
build = self.get_build_from_identifier(build.identifier)
|
||||
assert build is not None
|
||||
|
||||
if build.attributes.processing_state != "PROCESSING":
|
||||
return build
|
||||
self.log.info(
|
||||
f"Build {build_number} has not finished processing. Will check again in {wait_time} seconds..."
|
||||
)
|
||||
time.sleep(wait_time)
|
||||
|
||||
def get_beta_detail(self, build: Build) -> Optional[BuildBetaDetail]:
|
||||
"""Get the build beta details.
|
||||
|
||||
:param build: The build to get the beta details for
|
||||
|
||||
:returns: The build beta details if found, None otherwise
|
||||
"""
|
||||
assert build.relationships is not None
|
||||
url = build.relationships["buildBetaDetail"].links.related
|
||||
return next_or_none(self.http_client.get(url=url, data_type=BuildBetaDetail))
|
|
@ -0,0 +1,62 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
from asconnect.app_client import AppClient
|
||||
from asconnect.app_info_client import AppInfoClient
|
||||
from asconnect.beta_review_client import BetaReviewClient
|
||||
from asconnect.build_client import BuildClient
|
||||
from asconnect.screenshot_client import ScreenshotClient
|
||||
from asconnect.version_client import VersionClient
|
||||
|
||||
# pylint: disable=too-many-public-methods
|
||||
|
||||
|
||||
class Client:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
app: AppClient
|
||||
app_info: AppInfoClient
|
||||
beta_review: BetaReviewClient
|
||||
build: BuildClient
|
||||
screenshots: ScreenshotClient
|
||||
version: VersionClient
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
key_id: str,
|
||||
key_contents: str,
|
||||
issuer_id: str,
|
||||
log: Optional[logging.Logger] = None,
|
||||
) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param key_id: The ID of your key (can be found in app store connect)
|
||||
:param key_contents: The contents of your key
|
||||
:param issuer_id: The issuer ID (can be found in App Store Connect)
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.http_client = HttpClient(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id)
|
||||
|
||||
if log is None:
|
||||
self.log = logging.getLogger("asconnect")
|
||||
else:
|
||||
self.log = log.getChild("asconnect")
|
||||
|
||||
self.app = AppClient(http_client=self.http_client, log=self.log)
|
||||
self.app_info = AppInfoClient(http_client=self.http_client, log=self.log)
|
||||
self.beta_review = BetaReviewClient(http_client=self.http_client, log=self.log)
|
||||
self.build = BuildClient(http_client=self.http_client, log=self.log)
|
||||
self.screenshots = ScreenshotClient(http_client=self.http_client, log=self.log)
|
||||
self.version = VersionClient(http_client=self.http_client, log=self.log)
|
|
@ -0,0 +1,44 @@
|
|||
"""Exceptions"""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
from typing import Any
|
||||
|
||||
|
||||
class AppStoreConnectError(Exception):
|
||||
"""An error response from the API."""
|
||||
|
||||
identifier: str
|
||||
code: str
|
||||
status: int
|
||||
title: str
|
||||
detail: str
|
||||
source: Any
|
||||
|
||||
def __init__(self, data: Any):
|
||||
"""Create a new instance.
|
||||
|
||||
:param data: The raw data from the response
|
||||
|
||||
:raises ValueError: If we can't decode the data
|
||||
"""
|
||||
|
||||
if not isinstance(data, dict):
|
||||
raise ValueError(f"Could not decode App Store Connect error: {data}")
|
||||
|
||||
data = data["errors"]
|
||||
|
||||
if not isinstance(data, list):
|
||||
raise ValueError(f"Could not decode App Store Connect error: {data}")
|
||||
|
||||
data = data[0]
|
||||
|
||||
self.identifier = data["id"]
|
||||
self.status = int(data["status"])
|
||||
self.code = data["code"]
|
||||
self.title = data["title"]
|
||||
self.detail = data["detail"]
|
||||
self.source = data.get("source")
|
||||
|
||||
super().__init__(f"[{self.status}] {self.title} ({self.code}): {self.detail}")
|
|
@ -0,0 +1,305 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any, Dict, Iterator, Optional, Tuple, Type
|
||||
|
||||
import deserialize
|
||||
import jwt
|
||||
import requests
|
||||
|
||||
from asconnect.exceptions import AppStoreConnectError
|
||||
|
||||
|
||||
class HttpClient:
|
||||
"""Base HTTP client for the ASC API."""
|
||||
|
||||
key_id: str
|
||||
key_contents: str
|
||||
issuer_id: str
|
||||
log: logging.Logger
|
||||
|
||||
_cached_token_info: Optional[Tuple[str, datetime.datetime]]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
key_id: str,
|
||||
key_contents: str,
|
||||
issuer_id: str,
|
||||
log: Optional[logging.Logger] = None,
|
||||
) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param key_id: The ID of your key (can be found in app store connect)
|
||||
:param key_contents: The contents of your key
|
||||
:param issuer_id: The issuer ID (can be found in App Store Connect)
|
||||
:param log: Any base logger to be used (one will be created if not supplied)
|
||||
"""
|
||||
|
||||
self.key_id = key_id
|
||||
self.key_contents = key_contents
|
||||
self.issuer_id = issuer_id
|
||||
|
||||
if log is None:
|
||||
self.log = logging.getLogger("asconnect")
|
||||
else:
|
||||
self.log = log.getChild("asconnect")
|
||||
|
||||
self._cached_token_info = None
|
||||
|
||||
def generate_token(self) -> str:
|
||||
"""Generate a new JWT token.
|
||||
|
||||
:returns: The JWT token as a string
|
||||
"""
|
||||
|
||||
# Apple state that performance is improved if we re-use the same token
|
||||
# instead of generating a new one each time
|
||||
if self._cached_token_info is not None:
|
||||
cached_token, cached_expiration = self._cached_token_info
|
||||
if cached_expiration - datetime.datetime.now() > datetime.timedelta(minutes=1):
|
||||
return cached_token
|
||||
|
||||
# Tokens more than 20 minutes in the future are invalid.
|
||||
expiration = datetime.datetime.now() + datetime.timedelta(minutes=20)
|
||||
|
||||
# Details at https://developer.apple.com/documentation/appstoreconnectapi/generating_tokens_for_api_requests
|
||||
token = jwt.encode(
|
||||
{
|
||||
"iss": self.issuer_id,
|
||||
"exp": int(expiration.timestamp()),
|
||||
"aud": "appstoreconnect-v1",
|
||||
},
|
||||
self.key_contents,
|
||||
algorithm="ES256",
|
||||
headers={"kid": self.key_id, "typ": "JWT"},
|
||||
).decode("utf-8")
|
||||
|
||||
self._cached_token_info = (token, expiration)
|
||||
|
||||
return token
|
||||
|
||||
def generate_url(self, endpoint: str) -> str:
|
||||
"""Generate a URL for an endpoint.
|
||||
|
||||
:param endpoint: The endpoint to generate the URL for
|
||||
|
||||
:returns: An endpoint URL
|
||||
"""
|
||||
_ = self
|
||||
return f"https://api.appstoreconnect.apple.com/v1/{endpoint}"
|
||||
|
||||
def get(
|
||||
self, *, data_type: Type, endpoint: Optional[str] = None, url: Optional[str] = None,
|
||||
) -> Iterator[Any]:
|
||||
"""Perform a GET to the endpoint specified.
|
||||
|
||||
Either endpoint or url must be specified. url will take precedence if
|
||||
both are specified.
|
||||
|
||||
:param Type data_type: The class to deserialize the data of the response to
|
||||
:param Optional[str] endpoint: The endpoint to perform the GET on
|
||||
:param Optional[str] url: The full URL to perform the GET on
|
||||
|
||||
:raises ValueError: If neither url or endpoint are specified
|
||||
|
||||
:returns: An iterator over the deserialized response data
|
||||
"""
|
||||
token = self.generate_token()
|
||||
|
||||
if url is None:
|
||||
if endpoint is None:
|
||||
raise ValueError("Either `endpoint` or `url` must be set")
|
||||
url = self.generate_url(endpoint)
|
||||
|
||||
while True:
|
||||
raw_response = requests.get(url, headers={"Authorization": f"Bearer {token}"},)
|
||||
response_data = self.extract_data(raw_response)
|
||||
|
||||
if response_data["data"] is None:
|
||||
yield from []
|
||||
else:
|
||||
deserialized_data = deserialize.deserialize(
|
||||
data_type, response_data["data"], throw_on_unhandled=True
|
||||
)
|
||||
|
||||
if isinstance(deserialized_data, list):
|
||||
yield from deserialized_data
|
||||
else:
|
||||
yield deserialized_data
|
||||
|
||||
if response_data.get("links") is None:
|
||||
break
|
||||
|
||||
if response_data["links"].get("next") is None:
|
||||
break
|
||||
|
||||
url = response_data["links"]["next"]
|
||||
assert url is not None
|
||||
|
||||
def patch(
|
||||
self,
|
||||
*,
|
||||
data_type: Optional[Type],
|
||||
endpoint: Optional[str] = None,
|
||||
url: Optional[str] = None,
|
||||
data: Any,
|
||||
) -> Any:
|
||||
"""Perform a PATCH to the endpoint specified.
|
||||
|
||||
Either endpoint or url must be specified. url will take precedence if
|
||||
both are specified.
|
||||
|
||||
:param Optional[Type] data_type: The class to deserialize the data of the response to
|
||||
:param Optional[str] endpoint: The endpoint to perform the GET on
|
||||
:param Optional[str] url: The full URL to perform the GET on
|
||||
:param Any data: Some JSON serializable data to send
|
||||
|
||||
:raises AppStoreConnectError: If we don't get a 200 response back
|
||||
:raises ValueError: If neither url or endpoint are specified
|
||||
|
||||
:returns: The raw response
|
||||
"""
|
||||
token = self.generate_token()
|
||||
|
||||
if url is None:
|
||||
if endpoint is None:
|
||||
raise ValueError("Either `endpoint` or `url` must be set")
|
||||
url = self.generate_url(endpoint)
|
||||
|
||||
raw_response = requests.patch(
|
||||
url,
|
||||
json=data,
|
||||
headers={"Authorization": f"Bearer {token}", "Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
if raw_response.status_code == 204:
|
||||
return None
|
||||
|
||||
if raw_response.status_code != 200:
|
||||
raise AppStoreConnectError(raw_response.json())
|
||||
|
||||
response_data = self.extract_data(raw_response)
|
||||
|
||||
if data_type is None:
|
||||
return None
|
||||
|
||||
return deserialize.deserialize(data_type, response_data["data"], throw_on_unhandled=True)
|
||||
|
||||
def post(
|
||||
self,
|
||||
*,
|
||||
endpoint: Optional[str] = None,
|
||||
url: Optional[str] = None,
|
||||
data: Any,
|
||||
data_type: Optional[Type] = None,
|
||||
) -> Any:
|
||||
"""Perform a POST to the endpoint specified.
|
||||
|
||||
Either endpoint or url must be specified. url will take precedence if
|
||||
both are specified.
|
||||
|
||||
:param Optional[str] endpoint: The endpoint to perform the GET on
|
||||
:param Optional[str] url: The full URL to perform the GET on
|
||||
:param Any data: Some JSON serializable data to send
|
||||
:param Optional[Type] data_type: The data type to deserialize the response to
|
||||
|
||||
:raises ValueError: If neither url or endpoint are specified
|
||||
:raises AppStoreConnectError: If we get a failure response back from the API
|
||||
|
||||
:returns: The raw response
|
||||
"""
|
||||
token = self.generate_token()
|
||||
|
||||
if url is None:
|
||||
if endpoint is None:
|
||||
raise ValueError("Either `endpoint` or `url` must be set")
|
||||
url = self.generate_url(endpoint)
|
||||
|
||||
raw_response = requests.post(
|
||||
url,
|
||||
json=data,
|
||||
headers={"Authorization": f"Bearer {token}", "Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
if raw_response.status_code == 201:
|
||||
|
||||
if data_type is None:
|
||||
return None
|
||||
|
||||
response_data = self.extract_data(raw_response)
|
||||
|
||||
return deserialize.deserialize(
|
||||
data_type, response_data["data"], throw_on_unhandled=True
|
||||
)
|
||||
|
||||
if raw_response.status_code >= 200 and raw_response.status_code < 300:
|
||||
return None
|
||||
|
||||
raise AppStoreConnectError(raw_response.json())
|
||||
|
||||
def delete(
|
||||
self, *, endpoint: Optional[str] = None, url: Optional[str] = None
|
||||
) -> requests.Response:
|
||||
"""Perform a DELETE to the endpoint specified.
|
||||
|
||||
Either endpoint or url must be specified. url will take precedence if
|
||||
both are specified.
|
||||
|
||||
:param Optional[str] endpoint: The endpoint to perform the GET on
|
||||
:param Optional[str] url: The full URL to perform the GET on
|
||||
|
||||
:raises ValueError: If neither url or endpoint are specified
|
||||
|
||||
:returns: The raw response
|
||||
"""
|
||||
token = self.generate_token()
|
||||
|
||||
if url is None:
|
||||
if endpoint is None:
|
||||
raise ValueError("Either `endpoint` or `url` must be set")
|
||||
url = self.generate_url(endpoint)
|
||||
|
||||
return requests.delete(
|
||||
url, headers={"Authorization": f"Bearer {token}", "Content-Type": "application/json"},
|
||||
)
|
||||
|
||||
def put_chunk(
|
||||
self, *, url: str, additional_headers: Dict[str, str], data: bytes
|
||||
) -> requests.Response:
|
||||
"""Perform a PUT to the url specified
|
||||
|
||||
:param str url: The full URL to perform the PUT on
|
||||
:param Dict[str,str] additional_headers: The additional headers to add
|
||||
:param bytes data: The raw data to upload
|
||||
|
||||
:returns: The raw response
|
||||
"""
|
||||
token = self.generate_token()
|
||||
|
||||
headers = {
|
||||
**{"Authorization": f"Bearer {token}", "Content-Type": "application/json"},
|
||||
**additional_headers,
|
||||
}
|
||||
|
||||
return requests.put(url=url, data=data, headers=headers)
|
||||
|
||||
def extract_data(self, response: requests.Response) -> Any:
|
||||
"""Validate a response from the API and extract the data
|
||||
|
||||
:param response: The response to validate
|
||||
|
||||
:raises AppStoreConnectError: On any failure to validate
|
||||
|
||||
:returns: Any data in the response
|
||||
"""
|
||||
_ = self
|
||||
|
||||
if not response.ok:
|
||||
raise AppStoreConnectError(response.json())
|
||||
|
||||
return response.json()
|
|
@ -0,0 +1,140 @@
|
|||
"""Wrapper for iTMS tool."""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from typing import Optional
|
||||
|
||||
|
||||
from asconnect.utilities import md5_file
|
||||
|
||||
|
||||
def write_metadata(ipa_path: str, apple_id: str, itmsp_path: str) -> None:
|
||||
"""Write out the metadata for an ITMS bundle.
|
||||
|
||||
:param ipa_path: The path to the IPA
|
||||
:param apple_id: The Apple ID for the app (this is _not_ the app ID)
|
||||
:param itmsp_path: The path to the ITMS bundle
|
||||
"""
|
||||
|
||||
metadata = f"""<?xml version="1.0" encoding="UTF-8"?>
|
||||
<package xmlns="http://apple.com/itunes/importer" version="software5.4">
|
||||
<software_assets apple_id="{apple_id}" app_platform="ios">
|
||||
<asset type="bundle">
|
||||
<data_file>
|
||||
<size>{os.path.getsize(ipa_path)}</size>
|
||||
<file_name>{os.path.basename(ipa_path)}</file_name>
|
||||
<checksum type="md5">{md5_file(ipa_path)}</checksum>
|
||||
</data_file>
|
||||
</asset>
|
||||
</software_assets>
|
||||
</package>
|
||||
"""
|
||||
|
||||
metadata_path = os.path.join(itmsp_path, "metadata.xml")
|
||||
with open(metadata_path, "w") as metadata_file:
|
||||
metadata_file.write(metadata)
|
||||
|
||||
|
||||
def remove_upload_tokens() -> None:
|
||||
"""Remove any existing upload tokens.
|
||||
|
||||
If we don't remove these, we can get an error stating that there is already
|
||||
another upload process running. Xcode normally cleans these up, but since we
|
||||
are running iTMSTransporter directly, we have to do it ourselves.
|
||||
"""
|
||||
token_path = os.path.expanduser("~")
|
||||
token_path = os.path.join(
|
||||
token_path, "Library", "Caches", "com.apple.amp.itmstransporter", "UploadTokens"
|
||||
)
|
||||
|
||||
if not os.path.exists(token_path):
|
||||
return
|
||||
|
||||
for token_file in os.listdir(token_path):
|
||||
token_file_path = os.path.join(token_path, token_file)
|
||||
os.remove(token_file_path)
|
||||
|
||||
|
||||
def upload_build(
|
||||
*,
|
||||
ipa_path: str,
|
||||
bundle_id: str,
|
||||
app_id: str,
|
||||
username: str,
|
||||
password: str,
|
||||
log: Optional[logging.Logger] = None,
|
||||
) -> None:
|
||||
"""Upload a new build to ITC
|
||||
|
||||
:param str ipa_path: The path to the .ipa file
|
||||
:param str bundle_id: The bundle ID for the app
|
||||
:param str app_id: The Apple ID for the app (despite the parameter name, this is _not_ the same as the app ID)
|
||||
:param str username: The username to use for authentication
|
||||
:param str password: The password to use for authentication
|
||||
:param Optional[logging.Logger] log: An optional logger to use
|
||||
|
||||
:raises CalledProcessError: If the upload does not complete successfully
|
||||
"""
|
||||
|
||||
if log:
|
||||
log = log.getChild(__name__)
|
||||
else:
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
remove_upload_tokens()
|
||||
|
||||
upload_dir = tempfile.mkdtemp()
|
||||
|
||||
itmsp_path = os.path.join(upload_dir, f"{bundle_id}.itmsp")
|
||||
os.makedirs(itmsp_path)
|
||||
|
||||
shutil.copy(ipa_path, itmsp_path)
|
||||
|
||||
write_metadata(ipa_path, app_id, itmsp_path)
|
||||
|
||||
command = [
|
||||
"xcrun",
|
||||
"iTMSTransporter",
|
||||
"-m",
|
||||
"upload",
|
||||
"-u",
|
||||
username,
|
||||
"-p",
|
||||
"@env:ITMS_TRANSPORTER_PASSWORD",
|
||||
"-f",
|
||||
itmsp_path,
|
||||
"-k",
|
||||
"100000",
|
||||
]
|
||||
|
||||
log.debug(f"Running itms upload: {command}")
|
||||
|
||||
current_environment = os.environ.copy()
|
||||
current_environment["ITMS_TRANSPORTER_PASSWORD"] = password
|
||||
|
||||
upload_process = subprocess.Popen(
|
||||
command,
|
||||
universal_newlines=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
bufsize=1,
|
||||
env=current_environment,
|
||||
)
|
||||
|
||||
assert upload_process.stdout is not None
|
||||
command_output = ""
|
||||
for line in iter(upload_process.stdout.readline, ""):
|
||||
log.info(line.rstrip())
|
||||
command_output += line
|
||||
upload_process.stdout.close()
|
||||
upload_process.wait()
|
||||
|
||||
if upload_process.returncode != 0:
|
||||
raise subprocess.CalledProcessError(
|
||||
upload_process.returncode,
|
||||
command,
|
||||
"Failed to upload the build. Please see the logs for more information.",
|
||||
)
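As a rough usage sketch of the wrapper above (every value below is a placeholder, not a real identifier or credential):

upload_build(
    ipa_path="/path/to/MyApp.ipa",
    bundle_id="com.example.myapp",
    app_id="1234567890",  # the numeric Apple ID of the app
    username="user@example.com",
    password="an-app-specific-password",  # passed to iTMSTransporter via ITMS_TRANSPORTER_PASSWORD
)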
|
|
@ -0,0 +1,13 @@
|
|||
"""Models for the API"""
|
||||
|
||||
from asconnect.models.apps import *
|
||||
from asconnect.models.app_info import *
|
||||
from asconnect.models.app_store import *
|
||||
from asconnect.models.app_store_version_localizations import *
|
||||
from asconnect.models.beta_app_review import *
|
||||
from asconnect.models.beta_detail import *
|
||||
from asconnect.models.beta_groups import *
|
||||
from asconnect.models.builds import *
|
||||
from asconnect.models.idfa import *
|
||||
from asconnect.models.localization import *
|
||||
from asconnect.models.screenshots import *
|
|
@ -0,0 +1,80 @@
|
|||
"""Localization models for the API"""
|
||||
|
||||
import enum
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Links, Relationship, Resource
|
||||
from asconnect.models.app_store import AppStoreVersionState
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppInfoLocalization(Resource):
|
||||
"""Represents a build."""
|
||||
|
||||
@deserialize.key("privacy_policy_text", "privacyPolicyText")
|
||||
@deserialize.key("privacy_policy_url", "privacyPolicyUrl")
|
||||
class Attributes:
|
||||
"""Represents app info localization attributes."""
|
||||
|
||||
locale: str
|
||||
name: str
|
||||
privacy_policy_text: str
|
||||
privacy_policy_url: Optional[str]
|
||||
subtitle: Optional[str]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
||||
|
||||
|
||||
class AppStoreAgeRating(enum.Enum):
|
||||
"""App store age rating."""
|
||||
|
||||
four_plus = "FOUR_PLUS"
|
||||
nine_plus = "NINE_PLUS"
|
||||
twelve_plus = "TWELVE_PLUS"
|
||||
seventeen_plus = "SEVENTEEN_PLUS"
|
||||
|
||||
|
||||
class BrazilAgeRating(enum.Enum):
|
||||
"""Brazil age rating."""
|
||||
|
||||
l = "L"
|
||||
ten = "TEN"
|
||||
twelve = "TWELVE"
|
||||
fourteen = "FOURTEEN"
|
||||
sixteen = "SIXTEEN"
|
||||
eighteen = "EIGHTEEN"
|
||||
|
||||
|
||||
class KidsAgeBand(enum.Enum):
|
||||
"""Kids age band."""
|
||||
|
||||
five_and_under = "FIVE_AND_UNDER"
|
||||
six_to_eight = "SIX_TO_EIGHT"
|
||||
nine_to_eleven = "NINE_TO_ELEVEN"
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppInfo(Resource):
|
||||
"""Represents an apps info."""
|
||||
|
||||
@deserialize.key("app_store_age_rating", "appStoreAgeRating")
|
||||
@deserialize.key("app_store_state", "appStoreState")
|
||||
@deserialize.key("brazil_age_rating", "brazilAgeRating")
|
||||
@deserialize.key("kids_age_band", "kidsAgeBand")
|
||||
class Attributes:
|
||||
"""Represents app info localization attributes."""
|
||||
|
||||
app_store_age_rating: AppStoreAgeRating
|
||||
app_store_state: AppStoreVersionState
|
||||
brazil_age_rating: BrazilAgeRating
|
||||
kids_age_band: Optional[KidsAgeBand]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,104 @@
|
|||
"""App Models for the API"""
|
||||
|
||||
import enum
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
class Platform(enum.Enum):
|
||||
"""The different platforms an app can be for."""
|
||||
|
||||
ios = "IOS"
|
||||
macos = "MAC_OS"
|
||||
tvos = "TV_OS"
|
||||
|
||||
|
||||
class ReleaseType(enum.Enum):
|
||||
"""App store release type."""
|
||||
|
||||
manual = "MANUAL"
|
||||
after_approval = "AFTER_APPROVAL"
|
||||
scheduled = "SCHEDULED"
|
||||
|
||||
|
||||
class AppStoreVersionState(enum.Enum):
|
||||
"""App store version state."""
|
||||
|
||||
developer_removed_from_sale = "DEVELOPER_REMOVED_FROM_SALE"
|
||||
developer_rejected = "DEVELOPER_REJECTED"
|
||||
in_review = "IN_REVIEW"
|
||||
invalid_binary = "INVALID_BINARY"
|
||||
metadata_rejected = "METADATA_REJECTED"
|
||||
pending_apple_release = "PENDING_APPLE_RELEASE"
|
||||
pending_contract = "PENDING_CONTRACT"
|
||||
pending_developer_release = "PENDING_DEVELOPER_RELEASE"
|
||||
prepare_for_submission = "PREPARE_FOR_SUBMISSION"
|
||||
preorder_ready_for_sale = "PREORDER_READY_FOR_SALE"
|
||||
processing_for_app_store = "PROCESSING_FOR_APP_STORE"
|
||||
ready_for_sale = "READY_FOR_SALE"
|
||||
rejected = "REJECTED"
|
||||
removed_from_sale = "REMOVED_FROM_SALE"
|
||||
waiting_for_export_compliance = "WAITING_FOR_EXPORT_COMPLIANCE"
|
||||
waiting_for_review = "WAITING_FOR_REVIEW"
|
||||
replaced_with_new_version = "REPLACED_WITH_NEW_VERSION"
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppStoreVersion(Resource):
|
||||
"""Represents an app store version."""
|
||||
|
||||
@deserialize.key("app_store_state", "appStoreState")
|
||||
@deserialize.key("earliest_release_date", "earliestReleaseDate")
|
||||
@deserialize.key("release_type", "releaseType")
|
||||
@deserialize.key("uses_idfa", "usesIdfa")
|
||||
@deserialize.key("version_string", "versionString")
|
||||
@deserialize.key("created_date", "createdDate")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
platform: Platform
|
||||
app_store_state: AppStoreVersionState
|
||||
copyright: str
|
||||
earliest_release_date: Optional[str]
|
||||
release_type: ReleaseType
|
||||
uses_idfa: Optional[bool]
|
||||
version_string: str
|
||||
created_date: str
|
||||
downloadable: bool
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppStoreReviewDetails(Resource):
|
||||
"""Represents an app store review details."""
|
||||
|
||||
@deserialize.key("contact_email", "contactEmail")
|
||||
@deserialize.key("contact_first_name", "contactFirstName")
|
||||
@deserialize.key("contact_last_name", "contactLastName")
|
||||
@deserialize.key("contact_phone", "contactPhone")
|
||||
@deserialize.key("demo_account_name", "demoAccountName")
|
||||
@deserialize.key("demo_account_password", "demoAccountPassword")
|
||||
@deserialize.key("demo_account_required", "demoAccountRequired")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
contact_email: str
|
||||
contact_first_name: str
|
||||
contact_last_name: str
|
||||
contact_phone: str
|
||||
demo_account_name: str
|
||||
demo_account_password: str
|
||||
demo_account_required: bool
|
||||
notes: str
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,32 @@
|
|||
"""App Models for the API"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppStoreVersionLocalization(Resource):
|
||||
"""Represents an app store version localization."""
|
||||
|
||||
@deserialize.key("marketing_url", "marketingUrl")
|
||||
@deserialize.key("promotional_text", "promotionalText")
|
||||
@deserialize.key("support_url", "supportUrl")
|
||||
@deserialize.key("whats_new", "whatsNew")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
description: str
|
||||
keywords: str
|
||||
locale: str
|
||||
marketing_url: Optional[str]
|
||||
promotional_text: Optional[str]
|
||||
support_url: str
|
||||
whats_new: Optional[str]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,98 @@
|
|||
"""App Models for the API"""
|
||||
|
||||
import enum
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
class ContentRightsDeclaration(enum.Enum):
|
||||
"""Contents rights declarations."""
|
||||
|
||||
does_not_use_third_party_content = "DOES_NOT_USE_THIRD_PARTY_CONTENT"
|
||||
uses_third_party_content = "USES_THIRD_PARTY_CONTENT"
|
||||
|
||||
|
||||
@deserialize.key("bundle_id", "bundleId")
|
||||
@deserialize.key("primary_locale", "primaryLocale")
|
||||
@deserialize.key("available_in_new_territories", "availableInNewTerritories")
|
||||
@deserialize.key("content_rights_declaration", "contentRightsDeclaration")
|
||||
@deserialize.key("is_or_ever_was_made_for_kids", "isOrEverWasMadeForKids")
|
||||
class AppAttributes:
|
||||
"""Represents app attributes."""
|
||||
|
||||
bundle_id: str
|
||||
name: str
|
||||
primary_locale: str
|
||||
sku: str
|
||||
available_in_new_territories: Optional[bool]
|
||||
content_rights_declaration: Optional[ContentRightsDeclaration]
|
||||
is_or_ever_was_made_for_kids: bool
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class App(Resource):
|
||||
"""Represents an app."""
|
||||
|
||||
identifier: str
|
||||
attributes: AppAttributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
||||
|
||||
@property
|
||||
def bundle_id(self) -> str:
|
||||
"""Return the bundle ID.
|
||||
|
||||
:returns: The bundle ID
|
||||
"""
|
||||
return self.attributes.bundle_id
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name
|
||||
|
||||
:returns: The name
|
||||
"""
|
||||
return self.attributes.name
|
||||
|
||||
@property
|
||||
def primary_locale(self) -> str:
|
||||
"""Return the primary locale
|
||||
|
||||
:returns: The primary locale
|
||||
"""
|
||||
return self.attributes.primary_locale
|
||||
|
||||
@property
|
||||
def sku(self) -> str:
|
||||
"""Return the SKU
|
||||
|
||||
:returns: The SKU
|
||||
"""
|
||||
return self.attributes.sku
|
||||
|
||||
@property
|
||||
def available_in_new_territories(self) -> Optional[bool]:
|
||||
"""Returns whether or not this app is available in new territories
|
||||
|
||||
:returns: True if this app is available in new territories, False otherwise
|
||||
"""
|
||||
return self.attributes.available_in_new_territories
|
||||
|
||||
@property
|
||||
def content_rights_declaration(self) -> Optional[ContentRightsDeclaration]:
|
||||
"""Return any content rights declaration
|
||||
|
||||
:returns: Any content rights declaration
|
||||
"""
|
||||
return self.attributes.content_rights_declaration
|
||||
|
||||
@property
|
||||
def is_or_ever_was_made_for_kids(self) -> bool:
|
||||
"""Return whether or not this app was ever made for kids
|
||||
|
||||
:returns: True if this app was ever made for kids, False otherwise
|
||||
"""
|
||||
return self.attributes.is_or_ever_was_made_for_kids
|
|
@ -0,0 +1,36 @@
|
|||
"""Model types for requests."""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class BetaAppReviewDetail(Resource):
|
||||
"""Represents a beta apps review details."""
|
||||
|
||||
@deserialize.key("contact_email", "contactEmail")
|
||||
@deserialize.key("contact_first_name", "contactFirstName")
|
||||
@deserialize.key("contact_last_name", "contactLastName")
|
||||
@deserialize.key("contact_phone", "contactPhone")
|
||||
@deserialize.key("demo_account_name", "demoAccountName")
|
||||
@deserialize.key("demo_account_password", "demoAccountPassword")
|
||||
@deserialize.key("demo_account_required", "demoAccountRequired")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
contact_email: str
|
||||
contact_first_name: str
|
||||
contact_last_name: str
|
||||
contact_phone: str
|
||||
demo_account_name: Optional[str]
|
||||
demo_account_password: Optional[str]
|
||||
demo_account_required: bool
|
||||
notes: Optional[str]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,57 @@
|
|||
"""Build beta detail models for the API"""
|
||||
|
||||
import enum
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Links, Relationship, Resource
|
||||
|
||||
|
||||
class ExternalBetaState(enum.Enum):
|
||||
"""External beta state."""
|
||||
|
||||
processing = "PROCESSING"
|
||||
processing_exception = "PROCESSING_EXCEPTION"
|
||||
missing_export_compliance = "MISSING_EXPORT_COMPLIANCE"
|
||||
ready_for_beta_testing = "READY_FOR_BETA_TESTING"
|
||||
in_beta_testing = "IN_BETA_TESTING"
|
||||
expired = "EXPIRED"
|
||||
ready_for_beta_submission = "READY_FOR_BETA_SUBMISSION"
|
||||
in_export_compliance_review = "IN_EXPORT_COMPLIANCE_REVIEW"
|
||||
waiting_for_beta_review = "WAITING_FOR_BETA_REVIEW"
|
||||
in_beta_review = "IN_BETA_REVIEW"
|
||||
beta_rejected = "BETA_REJECTED"
|
||||
beta_approved = "BETA_APPROVED"
|
||||
|
||||
|
||||
class InternalBetaState(enum.Enum):
|
||||
"""Internal beta state."""
|
||||
|
||||
processing = "PROCESSING"
|
||||
processing_exception = "PROCESSING_EXCEPTION"
|
||||
missing_export_compliance = "MISSING_EXPORT_COMPLIANCE"
|
||||
ready_for_beta_testing = "READY_FOR_BETA_TESTING"
|
||||
in_beta_testing = "IN_BETA_TESTING"
|
||||
expired = "EXPIRED"
|
||||
in_export_compliance_review = "IN_EXPORT_COMPLIANCE_REVIEW"
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class BuildBetaDetail(Resource):
|
||||
"""Represents a build localization."""
|
||||
|
||||
@deserialize.key("auto_notify_enabled", "autoNotifyEnabled")
|
||||
@deserialize.key("external_build_state", "externalBuildState")
|
||||
@deserialize.key("internal_build_state", "internalBuildState")
|
||||
class Attributes:
|
||||
"""Represents beta build localization attributes."""
|
||||
|
||||
auto_notify_enabled: bool
|
||||
external_build_state: ExternalBetaState
|
||||
internal_build_state: InternalBetaState
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,38 @@
|
|||
"""Beta groups models for the API"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Links, Relationship, Resource
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class BetaGroup(Resource):
|
||||
"""Represents a beta group."""
|
||||
|
||||
@deserialize.key("is_internal_group", "isInternalGroup")
|
||||
@deserialize.key("public_link", "publicLink")
|
||||
@deserialize.key("public_link_enabled", "publicLinkEnabled")
|
||||
@deserialize.key("public_link_id", "publicLinkId")
|
||||
@deserialize.key("public_link_limit", "publicLinkLimit")
|
||||
@deserialize.key("public_link_limit_enabled", "publicLinkLimitEnabled")
|
||||
@deserialize.key("created_date", "createdDate")
|
||||
@deserialize.key("feedback_enabled", "feedbackEnabled")
|
||||
class Attributes:
|
||||
"""Represents beta group attributes."""
|
||||
|
||||
is_internal_group: bool
|
||||
name: str
|
||||
public_link: Optional[str]
|
||||
public_link_enabled: Optional[bool]
|
||||
public_link_id: Optional[str]
|
||||
public_link_limit: Optional[int]
|
||||
public_link_limit_enabled: Optional[bool]
|
||||
created_date: str
|
||||
feedback_enabled: bool
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,45 @@
|
|||
"""Build Models for the API"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Links, Relationship, Resource
|
||||
|
||||
|
||||
@deserialize.key("template_url", "templateUrl")
|
||||
class IconAssetToken:
|
||||
"""Represents an icon asset token item."""
|
||||
|
||||
template_url: str
|
||||
width: int
|
||||
height: int
|
||||
|
||||
|
||||
@deserialize.key("uploaded_date", "uploadedDate")
|
||||
@deserialize.key("expiration_date", "expirationDate")
|
||||
@deserialize.key("min_os_version", "minOsVersion")
|
||||
@deserialize.key("icon_asset_token", "iconAssetToken")
|
||||
@deserialize.key("processing_state", "processingState")
|
||||
@deserialize.key("uses_non_exempt_encryption", "usesNonExemptEncryption")
|
||||
class BuildAttributes:
|
||||
"""Represents build attributes."""
|
||||
|
||||
version: str
|
||||
uploaded_date: str
|
||||
expiration_date: str
|
||||
expired: bool
|
||||
min_os_version: str
|
||||
icon_asset_token: Optional[IconAssetToken]
|
||||
processing_state: str
|
||||
uses_non_exempt_encryption: Optional[bool]
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class Build(Resource):
|
||||
"""Represents a build."""
|
||||
|
||||
identifier: str
|
||||
attributes: BuildAttributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,41 @@
|
|||
"""Models for the API"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
|
||||
@deserialize.key("self_link", "self")
|
||||
class Links:
|
||||
"""Represents item links."""
|
||||
|
||||
self_link: Optional[str]
|
||||
first: Optional[str]
|
||||
next: Optional[str]
|
||||
related: Optional[str]
|
||||
|
||||
|
||||
class Relationship:
|
||||
"""Represents a relationship."""
|
||||
|
||||
links: Links
|
||||
|
||||
|
||||
class Paging:
|
||||
"""Represents a paging data item in a REST response."""
|
||||
|
||||
total: int
|
||||
limit: int
|
||||
|
||||
|
||||
class Meta:
|
||||
"""Represents a meta data item in a REST response."""
|
||||
|
||||
paging: Paging
|
||||
|
||||
|
||||
@deserialize.key("resource_type", "type")
|
||||
class Resource:
|
||||
"""Represents a resource."""
|
||||
|
||||
resource_type: str
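A brief sketch of how these common models are hydrated via the deserialize library; the payload below is a hypothetical fragment rather than a real API response.

import deserialize

payload = {"type": "apps"}
resource = deserialize.deserialize(Resource, payload)

# The @deserialize.key decorator maps the JSON "type" field onto resource_type
assert resource.resource_type == "apps"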
|
|
@ -0,0 +1,31 @@
|
|||
"""App Models for the API"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class IdfaDeclaration(Resource):
|
||||
"""Represents an IDFA declaration."""
|
||||
|
||||
@deserialize.key("attributes_action_with_previous_ad", "attributesActionWithPreviousAd")
|
||||
@deserialize.key(
|
||||
"attributes_app_installation_to_previous_ad", "attributesAppInstallationToPreviousAd"
|
||||
)
|
||||
@deserialize.key("honors_limited_ad_tracking", "honorsLimitedAdTracking")
|
||||
@deserialize.key("serves_ads", "servesAds")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
attributes_action_with_previous_ad: bool
|
||||
attributes_app_installation_to_previous_ad: bool
|
||||
honors_limited_ad_tracking: bool
|
||||
serves_ads: bool
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,48 @@
|
|||
"""Localization models for the API"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Links, Relationship, Resource
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class BetaAppLocalization(Resource):
|
||||
"""Represents a build."""
|
||||
|
||||
@deserialize.key("feedback_email", "feedbackEmail")
|
||||
@deserialize.key("marketing_url", "marketingUrl")
|
||||
@deserialize.key("privacy_policy_url", "privacyPolicyUrl")
|
||||
@deserialize.key("tv_os_privacy_policy", "tvOsPrivacyPolicy")
|
||||
class Attributes:
|
||||
"""Represents beta app localization attributes."""
|
||||
|
||||
description: str
|
||||
feedback_email: str
|
||||
locale: str
|
||||
marketing_url: Optional[str]
|
||||
privacy_policy_url: Optional[str]
|
||||
tv_os_privacy_policy: Optional[str]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class BetaBuildLocalization(Resource):
|
||||
"""Represents a build localization."""
|
||||
|
||||
@deserialize.key("whats_new", "whatsNew")
|
||||
class Attributes:
|
||||
"""Represents beta build localization attributes."""
|
||||
|
||||
locale: str
|
||||
whats_new: Optional[str]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,147 @@
|
|||
"""App Models for the API"""
|
||||
|
||||
import enum
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import deserialize
|
||||
|
||||
from asconnect.models.common import Resource, Links, Relationship
|
||||
|
||||
|
||||
class ScreenshotDisplayType(enum.Enum):
|
||||
"""Screenshot display type."""
|
||||
|
||||
app_iphone_65 = "APP_IPHONE_65"
|
||||
app_iphone_58 = "APP_IPHONE_58"
|
||||
app_iphone_55 = "APP_IPHONE_55"
|
||||
app_iphone_47 = "APP_IPHONE_47"
|
||||
app_iphone_40 = "APP_IPHONE_40"
|
||||
app_iphone_35 = "APP_IPHONE_35"
|
||||
app_ipad_pro_3gen_129 = "APP_IPAD_PRO_3GEN_129"
|
||||
app_ipad_pro_3gen_11 = "APP_IPAD_PRO_3GEN_11"
|
||||
app_ipad_pro_129 = "APP_IPAD_PRO_129"
|
||||
app_ipad_105 = "APP_IPAD_105"
|
||||
app_ipad_97 = "APP_IPAD_97"
|
||||
app_desktop = "APP_DESKTOP"
|
||||
app_watch_series_4 = "APP_WATCH_SERIES_4"
|
||||
app_watch_series_3 = "APP_WATCH_SERIES_3"
|
||||
app_apple_tv = "APP_APPLE_TV"
|
||||
imessage_app_iphone_65 = "IMESSAGE_APP_IPHONE_65"
|
||||
imessage_app_iphone_58 = "IMESSAGE_APP_IPHONE_58"
|
||||
imessage_app_iphone_55 = "IMESSAGE_APP_IPHONE_55"
|
||||
imessage_app_iphone_47 = "IMESSAGE_APP_IPHONE_47"
|
||||
imessage_app_iphone_40 = "IMESSAGE_APP_IPHONE_40"
|
||||
imessage_app_ipad_pro_3gen_129 = "IMESSAGE_APP_IPAD_PRO_3GEN_129"
|
||||
imessage_app_ipad_pro_3gen_11 = "IMESSAGE_APP_IPAD_PRO_3GEN_11"
|
||||
imessage_app_ipad_pro_129 = "IMESSAGE_APP_IPAD_PRO_129"
|
||||
imessage_app_ipad_105 = "IMESSAGE_APP_IPAD_105"
|
||||
imessage_app_ipad_97 = "IMESSAGE_APP_IPAD_97"
|
||||
|
||||
@staticmethod
|
||||
def from_name(name: str) -> "ScreenshotDisplayType":
|
||||
"""Generate the display type from an image name.
|
||||
|
||||
:param name: The name to convert
|
||||
|
||||
:returns: The corresponding display type
|
||||
"""
|
||||
identifier = name.split("-")[0]
|
||||
return ScreenshotDisplayType(identifier)
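# As a hypothetical example of the file-naming convention from_name expects, a
# screenshot named "APP_IPHONE_65-home.png" resolves to the app_iphone_65 member:
#
#     ScreenshotDisplayType.from_name("APP_IPHONE_65-home.png")
#     # -> ScreenshotDisplayType.app_iphone_65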
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppScreenshotSet(Resource):
|
||||
"""Represents an app store screenshot set."""
|
||||
|
||||
@deserialize.key("screenshot_display_type", "screenshotDisplayType")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
screenshot_display_type: ScreenshotDisplayType
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
||||
|
||||
|
||||
class AppMediaStateError:
|
||||
"""An app media state error."""
|
||||
|
||||
code: str
|
||||
description: str
|
||||
|
||||
|
||||
class AppMediaAssetStateState(enum.Enum):
|
||||
"""The state value for and app media asset state."""
|
||||
|
||||
awaiting_upload = "AWAITING_UPLOAD"
|
||||
upload_complete = "UPLOAD_COMPLETE"
|
||||
complete = "COMPLETE"
|
||||
failed = "FAILED"
|
||||
|
||||
|
||||
class AppMediaAssetState:
|
||||
"""An app media asset state."""
|
||||
|
||||
errors: List[AppMediaStateError]
|
||||
state: AppMediaAssetStateState
|
||||
warnings: Optional[List[AppMediaStateError]]
|
||||
|
||||
|
||||
@deserialize.key("template_url", "templateUrl")
|
||||
class ImageAsset:
|
||||
"""An image asset."""
|
||||
|
||||
template_url: str
|
||||
height: int
|
||||
width: int
|
||||
|
||||
|
||||
class UploadOperationHeader:
|
||||
"""An upload operation header."""
|
||||
|
||||
name: str
|
||||
value: str
|
||||
|
||||
|
||||
@deserialize.key("request_headers", "requestHeaders")
|
||||
class UploadOperation:
|
||||
"""An upload operation."""
|
||||
|
||||
length: int
|
||||
method: str
|
||||
offset: int
|
||||
request_headers: List[UploadOperationHeader]
|
||||
url: str
|
||||
|
||||
|
||||
@deserialize.key("identifier", "id")
|
||||
class AppScreenshot(Resource):
|
||||
"""Represents an app store screenshot."""
|
||||
|
||||
@deserialize.key("asset_delivery_state", "assetDeliveryState")
|
||||
@deserialize.key("asset_token", "assetToken")
|
||||
@deserialize.key("asset_type", "assetType")
|
||||
@deserialize.key("file_name", "fileName")
|
||||
@deserialize.key("file_size", "fileSize")
|
||||
@deserialize.key("image_asset", "imageAsset")
|
||||
@deserialize.key("source_file_checksum", "sourceFileChecksum")
|
||||
@deserialize.key("upload_operations", "uploadOperations")
|
||||
class Attributes:
|
||||
"""Attributes."""
|
||||
|
||||
asset_delivery_state: AppMediaAssetState
|
||||
asset_token: str
|
||||
asset_type: str
|
||||
file_name: str
|
||||
file_size: int
|
||||
image_asset: Optional[ImageAsset]
|
||||
source_file_checksum: Optional[str]
|
||||
uploaded: Optional[bool]
|
||||
upload_operations: Optional[List[UploadOperation]]
|
||||
|
||||
identifier: str
|
||||
attributes: Attributes
|
||||
relationships: Optional[Dict[str, Relationship]]
|
||||
links: Links
|
|
@ -0,0 +1,238 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import Iterator, List
|
||||
|
||||
from asconnect.exceptions import AppStoreConnectError
|
||||
from asconnect.httpclient import HttpClient
|
||||
from asconnect.models import AppScreenshotSet, AppScreenshot, ScreenshotDisplayType, UploadOperation
|
||||
from asconnect.utilities import md5_file
|
||||
|
||||
|
||||
class ScreenshotClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: The base logger to use (a child logger will be created from it)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("screenshot")
|
||||
|
||||
def get_sets(self, *, localization_id: str,) -> Iterator[AppScreenshotSet]:
|
||||
"""Get the screenshot sets for an app localization.
|
||||
|
||||
:param localization_id: The localization ID to get the screenshot sets for
|
||||
|
||||
:returns: An iterator to ScreenshotSet
|
||||
"""
|
||||
url = self.http_client.generate_url(
|
||||
f"appStoreVersionLocalizations/{localization_id}/appScreenshotSets"
|
||||
)
|
||||
yield from self.http_client.get(url=url, data_type=List[AppScreenshotSet])
|
||||
|
||||
def delete_set(self, *, screenshot_set_id: str, delete_all_screenshots: bool = True) -> None:
|
||||
"""Delete a screenshot set.
|
||||
|
||||
:param screenshot_set_id: The ID of the screenshot set to delete
|
||||
:param delete_all_screenshots: If this is True, any screenshots will be deleted first.
|
||||
|
||||
:raises AppStoreConnectError: On failure to delete
|
||||
"""
|
||||
|
||||
if delete_all_screenshots:
|
||||
self.delete_screenshots_in_set(screenshot_set_id=screenshot_set_id)
|
||||
|
||||
url = self.http_client.generate_url(f"appScreenshotSets/{screenshot_set_id}")
|
||||
raw_response = self.http_client.delete(url=url)
|
||||
|
||||
if raw_response.status_code != 204:
|
||||
raise AppStoreConnectError(raw_response.json())
|
||||
|
||||
def get_screenshots(self, *, screenshot_set_id: str,) -> Iterator[AppScreenshot]:
|
||||
"""Get the screenshots for a set.
|
||||
|
||||
:param screenshot_set_id: The screenshot set ID to get the screenshots for
|
||||
|
||||
:returns: An iterator to AppScreenshot
|
||||
"""
|
||||
url = self.http_client.generate_url(f"appScreenshotSets/{screenshot_set_id}/appScreenshots")
|
||||
yield from self.http_client.get(url=url, data_type=List[AppScreenshot])
|
||||
|
||||
def delete_screenshot(self, *, screenshot_id: str) -> None:
|
||||
"""Delete a screenshot.
|
||||
|
||||
:param screenshot_id: The ID of the screenshot to delete
|
||||
|
||||
:raises AppStoreConnectError: On failure to delete
|
||||
"""
|
||||
url = self.http_client.generate_url(f"appScreenshots/{screenshot_id}")
|
||||
|
||||
self.log.debug(f"Deleting screenshot with id: {screenshot_id}")
|
||||
raw_response = self.http_client.delete(url=url)
|
||||
|
||||
if raw_response.status_code != 204:
|
||||
raise AppStoreConnectError(raw_response.json())
|
||||
|
||||
def delete_screenshots_in_set(self, *, screenshot_set_id: str) -> None:
|
||||
"""Delete all screenshots in set.
|
||||
|
||||
:param screenshot_set_id: The set to delete the screenshots in
|
||||
"""
|
||||
for screenshot in self.get_screenshots(screenshot_set_id=screenshot_set_id):
|
||||
self.log.info(f"Deleting screenshot: {screenshot.attributes.file_name}")
|
||||
self.delete_screenshot(screenshot_id=screenshot.identifier)
|
||||
|
||||
def delete_all_sets_in_localization(self, *, localization_id: str) -> None:
|
||||
"""Delete all the sets in a localization.
|
||||
|
||||
:param localization_id: The localization to delete the sets from
|
||||
"""
|
||||
for screenshot_set in self.get_sets(localization_id=localization_id):
|
||||
self.log.info(
|
||||
f"Deleting screenshot set: {screenshot_set.attributes.screenshot_display_type.value}"
|
||||
)
|
||||
self.delete_set(
|
||||
screenshot_set_id=screenshot_set.identifier, delete_all_screenshots=True
|
||||
)
|
||||
|
||||
def create_set(
|
||||
self, *, localization_id: str, display_type: ScreenshotDisplayType
|
||||
) -> AppScreenshotSet:
|
||||
"""Create a screenshot set for an app localization.
|
||||
|
||||
:param localization_id: The localization ID to create the screenshot set for
|
||||
:param display_type: The type of preview that the set is for
|
||||
|
||||
:raises AppStoreConnectError: On error when creating the set
|
||||
|
||||
:returns: The new screenshot set
|
||||
"""
|
||||
|
||||
return self.http_client.post(
|
||||
endpoint="appScreenshotSets",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": {"screenshotDisplayType": display_type.value},
|
||||
"type": "appScreenshotSets",
|
||||
"relationships": {
|
||||
"appStoreVersionLocalization": {
|
||||
"data": {"type": "appStoreVersionLocalizations", "id": localization_id,}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
data_type=AppScreenshotSet,
|
||||
)
|
||||
|
||||
def _create_screenshot_reservation(
|
||||
self, *, file_path: str, screenshot_set_id: str
|
||||
) -> AppScreenshot:
|
||||
"""Create a screenshot reservation
|
||||
|
||||
:param file_path: The path to the screenshot to reserve
|
||||
:param screenshot_set_id: The id for the screenshot set to reserve in
|
||||
|
||||
:raises AppStoreConnectError: On error when creating the reservation
|
||||
|
||||
:returns: The new screenshot reservation
|
||||
"""
|
||||
|
||||
file_name = os.path.basename(file_path)
|
||||
file_size = os.path.getsize(file_path)
|
||||
|
||||
return self.http_client.post(
|
||||
endpoint="appScreenshots",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": {"fileName": file_name, "fileSize": file_size},
|
||||
"type": "appScreenshots",
|
||||
"relationships": {
|
||||
"appScreenshotSet": {
|
||||
"data": {"type": "appScreenshotSets", "id": screenshot_set_id,}
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
data_type=AppScreenshot,
|
||||
)
|
||||
|
||||
def _upload_screenshot_contents(
|
||||
self, *, file_path: str, upload_operations: List[UploadOperation]
|
||||
) -> None:
|
||||
"""Upload a screenshots contents
|
||||
|
||||
:param file_path: The path to the screenshot to upload
|
||||
:param upload_operations: The upload operations for the screenshot
|
||||
|
||||
:raises AssertionError: If uploading a chunk does not succeed
|
||||
"""
|
||||
|
||||
# Start by ordering the upload operations by offset (so we can just go in order)
|
||||
upload_operations = sorted(upload_operations, key=lambda operation: operation.offset)
|
||||
|
||||
with open(file_path, "rb") as screenshot:
|
||||
for operation in upload_operations:
|
||||
data = screenshot.read(operation.length)
|
||||
headers = {header.name: header.value for header in operation.request_headers}
|
||||
raw_response = self.http_client.put_chunk(
|
||||
url=operation.url, additional_headers=headers, data=data
|
||||
)
|
||||
# TODO Check this
|
||||
assert raw_response.ok
|
||||
|
||||
def _set_screenshot_uploaded(
|
||||
self, *, screenshot: AppScreenshot, file_hash: str
|
||||
) -> AppScreenshot:
|
||||
"""Marks a screenshot as uploaded
|
||||
|
||||
:param screenshot: The screenshot to mark as uploaded
|
||||
:param file_hash: The MD5 of the file
|
||||
|
||||
:returns: The updated screenshot
|
||||
"""
|
||||
|
||||
return self.http_client.patch(
|
||||
endpoint=f"appScreenshots/{screenshot.identifier}",
|
||||
data={
|
||||
"data": {
|
||||
"attributes": {"uploaded": True, "sourceFileChecksum": file_hash},
|
||||
"type": "appScreenshots",
|
||||
"id": screenshot.identifier,
|
||||
}
|
||||
},
|
||||
data_type=AppScreenshot,
|
||||
)
|
||||
|
||||
def upload_screenshot(self, *, file_path: str, screenshot_set_id: str) -> AppScreenshot:
|
||||
"""Upload a screenshot
|
||||
|
||||
:param file_path: The path to the screenshot to upload
|
||||
:param screenshot_set_id: The id for the screenshot set to upload to
|
||||
|
||||
:returns: The screenshot
|
||||
"""
|
||||
|
||||
checksum = md5_file(file_path)
|
||||
|
||||
screenshot = self._create_screenshot_reservation(
|
||||
file_path=file_path, screenshot_set_id=screenshot_set_id
|
||||
)
|
||||
|
||||
assert screenshot.attributes.upload_operations is not None
|
||||
|
||||
self._upload_screenshot_contents(
|
||||
file_path=file_path, upload_operations=screenshot.attributes.upload_operations
|
||||
)
|
||||
|
||||
return self._set_screenshot_uploaded(screenshot=screenshot, file_hash=checksum)
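A rough end-to-end sketch of the screenshot flow above, assuming an already-constructed HttpClient named http_client; the localization ID and file path are placeholders.

import logging

from asconnect.models import ScreenshotDisplayType

client = ScreenshotClient(http_client=http_client, log=logging.getLogger("asconnect"))

# Create a set for a display type, then upload a screenshot into it
screenshot_set = client.create_set(
    localization_id="LOCALIZATION_ID",
    display_type=ScreenshotDisplayType.app_iphone_65,
)

client.upload_screenshot(
    file_path="/path/to/APP_IPHONE_65-home.png",
    screenshot_set_id=screenshot_set.identifier,
)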
|
|
@ -0,0 +1,14 @@
|
|||
"""Sorting options."""
|
||||
|
||||
import enum
|
||||
|
||||
|
||||
class BuildsSort(enum.Enum):
|
||||
"""Orders that builds can be sorted."""
|
||||
|
||||
PreReleaseVersion = "preReleaseVersion"
|
||||
PreReleaseVersionReversed = "-preReleaseVersion"
|
||||
UploadedDate = "uploadedDate"
|
||||
UploadedDateReversed = "-uploadedDate"
|
||||
Version = "version"
|
||||
VersionReversed = "-version"
|
|
@ -0,0 +1,53 @@
|
|||
"""Utilities for the library."""
|
||||
|
||||
import hashlib
|
||||
from typing import Dict, Iterator, Optional, TypeVar
|
||||
import urllib.parse
|
||||
|
||||
IteratorType = TypeVar("IteratorType")
|
||||
|
||||
|
||||
def next_or_none(iterator: Iterator[IteratorType]) -> Optional[IteratorType]:
|
||||
"""Get the next value from an iterator, or return None when it is exhausted.
|
||||
|
||||
:param iterator: The iterator to get the next value from
|
||||
|
||||
:returns: The next value or None if exhausted
|
||||
"""
|
||||
try:
|
||||
return next(iterator)
|
||||
except StopIteration:
|
||||
return None
|
||||
|
||||
|
||||
def update_query_parameters(url: str, query_parameters: Dict[str, str]) -> str:
|
||||
"""Update the query parameters on a URL.
|
||||
|
||||
:param url: The URL to update
|
||||
:param query_parameters: The query parameters to add
|
||||
|
||||
:returns: The updated URL
|
||||
"""
|
||||
parsed_url = urllib.parse.urlparse(url)
|
||||
parsed_parameters = dict(urllib.parse.parse_qsl(parsed_url.query))
|
||||
|
||||
new_parameters = {**parsed_parameters, **query_parameters}
|
||||
new_parameter_string = urllib.parse.urlencode(new_parameters, safe="[]")
|
||||
|
||||
parsed_url = urllib.parse.ParseResult(**dict(parsed_url._asdict(), query=new_parameter_string))
|
||||
|
||||
return urllib.parse.urlunparse(parsed_url)
|
||||
|
||||
|
||||
def md5_file(file_path: str) -> str:
|
||||
"""Generate the MD5 of a file.
|
||||
|
||||
:param file_path: The file to generate the MD5 for
|
||||
|
||||
:returns: The MD5 as a hex string
|
||||
"""
|
||||
hasher = hashlib.md5()
|
||||
with open(file_path, "rb") as file_handle:
|
||||
for chunk in iter(lambda: file_handle.read(4096), b""):
|
||||
hasher.update(chunk)
|
||||
return hasher.hexdigest()
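A short, hypothetical sketch of the utilities above in use (the URL, filter value, and file path are placeholders):

url = update_query_parameters(
    "https://api.appstoreconnect.apple.com/v1/builds",
    {"filter[app]": "1234567890"},
)
# -> "https://api.appstoreconnect.apple.com/v1/builds?filter[app]=1234567890"

checksum = md5_file("/path/to/MyApp.ipa")  # hex MD5 digest of the file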
|
|
@ -0,0 +1,280 @@
|
|||
"""Wrapper around the Apple App Store Connect APIs."""
|
||||
|
||||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
import logging
|
||||
from typing import Iterator, List, Optional
|
||||
|
||||
from asconnect.httpclient import HttpClient
|
||||
from asconnect.models import (
|
||||
AppStoreVersion,
|
||||
Platform,
|
||||
AppStoreVersionLocalization,
|
||||
AppStoreReviewDetails,
|
||||
IdfaDeclaration,
|
||||
)
|
||||
from asconnect.utilities import next_or_none, update_query_parameters
|
||||
|
||||
|
||||
class VersionClient:
|
||||
"""Wrapper class around the ASC API."""
|
||||
|
||||
log: logging.Logger
|
||||
http_client: HttpClient
|
||||
|
||||
def __init__(self, *, http_client: HttpClient, log: logging.Logger,) -> None:
|
||||
"""Construct a new client object.
|
||||
|
||||
:param http_client: The API HTTP client
|
||||
:param log: The base logger to use (a child logger will be created from it)
|
||||
"""
|
||||
|
||||
self.http_client = http_client
|
||||
self.log = log.getChild("version")
|
||||
|
||||
def get(self, *, version_id: str,) -> Optional[AppStoreVersion]:
|
||||
"""Get the version with the given ID
|
||||
|
||||
:param version_id: The version ID to get
|
||||
|
||||
:returns: An AppStoreVersion if found, None otherwise
|
||||
"""
|
||||
url = self.http_client.generate_url(f"appStoreVersions/{version_id}")
|
||||
|
||||
return next_or_none(self.http_client.get(url=url, data_type=AppStoreVersion))
|
||||
|
||||
def get_all(
|
||||
self,
|
||||
*,
|
||||
app_id: str,
|
||||
version_string: Optional[str] = None,
|
||||
platform: Optional[Platform] = None,
|
||||
) -> Iterator[AppStoreVersion]:
|
||||
"""Get the versions for an app.
|
||||
|
||||
:param app_id: The app ID to get the versions for
|
||||
:param version_string: The version to filter on (if any)
|
||||
:param platform: The platform to filter on (if any)
|
||||
|
||||
:returns: An iterator to AppStoreVersion
|
||||
"""
|
||||
url = self.http_client.generate_url(f"apps/{app_id}/appStoreVersions")
|
||||
|
||||
query_parameters = {}
|
||||
|
||||
if version_string:
|
||||
query_parameters["filter[versionString]"] = version_string
|
||||
|
||||
if platform:
|
||||
query_parameters["filter[platform]"] = platform.value
|
||||
|
||||
url = update_query_parameters(url, query_parameters)
|
||||
|
||||
yield from self.http_client.get(url=url, data_type=List[AppStoreVersion])
|
||||
|
||||
def get_version(self, *, app_id: str, version_string: str) -> Optional[AppStoreVersion]:
|
||||
"""Get the versions for an app.
|
||||
|
||||
:param app_id: The app ID to get the version for
|
||||
:param version_string: The version string to get the version for
|
||||
|
||||
:returns: An AppStoreVersion if found, None otherwise
|
||||
"""
|
||||
return next_or_none(self.get_all(app_id=app_id, version_string=version_string))
|
||||
|
||||
def get_localizations(self, *, version_id: str,) -> Iterator[AppStoreVersionLocalization]:
|
||||
"""Get the version localizations for an app version.
|
||||
|
||||
:param version_id: The version ID to get the localizations for
|
||||
|
||||
:returns: An iterator to AppStoreVersionLocalization
|
||||
"""
|
||||
url = self.http_client.generate_url(
|
||||
f"appStoreVersions/{version_id}/appStoreVersionLocalizations"
|
||||
)
|
||||
yield from self.http_client.get(url=url, data_type=List[AppStoreVersionLocalization])
|
||||
|
||||
def set_build(self, *, version_id: str, build_id: str) -> None:
|
||||
"""Set the build for a version
|
||||
|
||||
:param version_id: The ID of the version to set the build on
|
||||
:param build_id: The ID of the build to set
|
||||
"""
|
||||
|
||||
self.http_client.patch(
|
||||
endpoint=f"appStoreVersions/{version_id}/relationships/build",
|
||||
data={"data": {"type": "builds", "id": build_id,}},
|
||||
data_type=None,
|
||||
)
|
||||
|
||||
def get_app_review_details(self, *, version_id: str) -> Optional[AppStoreReviewDetails]:
|
||||
"""Get the app review details for the version.
|
||||
|
||||
:param version_id: The version ID to get the app review details for
|
||||
|
||||
:returns: The app review details if set, None otherwise
|
||||
"""
|
||||
return next_or_none(
|
||||
self.http_client.get(
|
||||
endpoint=f"appStoreVersions/{version_id}/appStoreReviewDetail",
|
||||
data_type=AppStoreReviewDetails,
|
||||
)
|
||||
)
|
||||
|
||||
def set_app_review_details(
|
||||
self,
|
||||
*,
|
||||
version_id: str,
|
||||
contact_email: str,
|
||||
contact_first_name: str,
|
||||
contact_last_name: str,
|
||||
contact_phone: str,
|
||||
demo_account_name: str,
|
||||
demo_account_password: str,
|
||||
demo_account_required: bool,
|
||||
notes: str,
|
||||
) -> AppStoreReviewDetails:
|
||||
"""Set the app store review details
|
||||
|
||||
:param version_id: The ID of the version to set the build on
|
||||
:param contact_email: The email for the app review contact
|
||||
:param contact_first_name: The first name for the app review contact
|
||||
:param contact_last_name: The last name for the app review contact
|
||||
:param contact_phone: The phone number for the app review contact
|
||||
:param demo_account_name: The username for the demo account
|
||||
:param demo_account_password: The password for the demo account
|
||||
:param demo_account_required: Set to True to mark the demo account as required
|
||||
:param notes: Any notes for the reviewer
|
||||
|
||||
:returns: The review details
|
||||
"""
|
||||
|
||||
existing_details = self.get_app_review_details(version_id=version_id)
|
||||
|
||||
attributes = {
|
||||
"contactFirstName": contact_first_name,
|
||||
"contactLastName": contact_last_name,
|
||||
"contactPhone": contact_phone,
|
||||
"contactEmail": contact_email,
|
||||
"demoAccountName": demo_account_name,
|
||||
"demoAccountPassword": demo_account_password,
|
||||
"demoAccountRequired": demo_account_required,
|
||||
"notes": notes,
|
||||
}
|
||||
|
||||
if existing_details:
|
||||
return self.http_client.patch(
|
||||
endpoint=f"appStoreReviewDetails/{existing_details.identifier}",
|
||||
data={
|
||||
"data": {
|
||||
"type": "appStoreReviewDetails",
|
||||
"id": existing_details.identifier,
|
||||
"attributes": attributes,
|
||||
}
|
||||
},
|
||||
data_type=AppStoreReviewDetails,
|
||||
)
|
||||
|
||||
return self.http_client.post(
|
||||
endpoint="appStoreReviewDetails",
|
||||
data={
|
||||
"data": {
|
||||
"type": "appStoreReviewDetails",
|
||||
"attributes": attributes,
|
||||
"relationships": {
|
||||
"appStoreVersion": {"data": {"type": "appStoreVersions", "id": version_id}}
|
||||
},
|
||||
}
|
||||
},
|
||||
data_type=AppStoreReviewDetails,
|
||||
)
|
||||
|
||||
def get_idfa(self, *, version_id: str) -> Optional[IdfaDeclaration]:
|
||||
"""Get the advertising ID declaration.
|
||||
|
||||
:param version_id: The version to get the declaration for
|
||||
|
||||
:returns: The declaration if set, None otherwise
|
||||
"""
|
||||
return next_or_none(
|
||||
self.http_client.get(
|
||||
endpoint=f"appStoreVersions/{version_id}/idfaDeclaration",
|
||||
data_type=IdfaDeclaration,
|
||||
)
|
||||
)
|
||||
|
||||
def set_idfa(
|
||||
self,
|
||||
*,
|
||||
version_id: str,
|
||||
attributes_action_with_previous_ad: bool,
|
||||
attributes_app_installation_to_previous_ad: bool,
|
||||
honors_limited_ad_tracking: bool,
|
||||
serves_ads: bool,
|
||||
) -> IdfaDeclaration:
|
||||
"""Set the IDFA declaration
|
||||
|
||||
:param version_id: The ID of the version to set the build on
|
||||
:param attributes_action_with_previous_ad: Set to True if the ID is used to attribute actions with a previous ad
|
||||
:param attributes_app_installation_to_previous_ad: Set to True if the ID is used to attribute an installation with a previous ad
|
||||
:param honors_limited_ad_tracking: Set to True to confirm that your app honors a user's ad tracking preferences
|
||||
:param serves_ads: Set to True if the advertising ID will be used to serve ads within your app
|
||||
|
||||
:returns: The IDFA declaration
|
||||
"""
|
||||
|
||||
existing_details = self.get_idfa(version_id=version_id)
|
||||
|
||||
attributes = {
|
||||
"attributesActionWithPreviousAd": attributes_action_with_previous_ad,
|
||||
"attributesAppInstallationToPreviousAd": attributes_app_installation_to_previous_ad,
|
||||
"honorsLimitedAdTracking": honors_limited_ad_tracking,
|
||||
"servesAds": serves_ads,
|
||||
}
|
||||
|
||||
if existing_details:
|
||||
return self.http_client.patch(
|
||||
endpoint=f"idfaDeclarations/{existing_details.identifier}",
|
||||
data={
|
||||
"data": {
|
||||
"type": "idfaDeclarations",
|
||||
"id": existing_details.identifier,
|
||||
"attributes": attributes,
|
||||
}
|
||||
},
|
||||
data_type=IdfaDeclaration,
|
||||
)
|
||||
|
||||
return self.http_client.post(
|
||||
endpoint="idfaDeclarations",
|
||||
data={
|
||||
"data": {
|
||||
"type": "idfaDeclarations",
|
||||
"attributes": attributes,
|
||||
"relationships": {
|
||||
"appStoreVersion": {"data": {"type": "appStoreVersions", "id": version_id}}
|
||||
},
|
||||
}
|
||||
},
|
||||
data_type=IdfaDeclaration,
|
||||
)
|
||||
|
||||
def submit_for_review(self, *, version_id: str,) -> None:
|
||||
"""Submit the version for review
|
||||
|
||||
:param version_id: The ID of the version to submit for review
|
||||
"""
|
||||
|
||||
self.http_client.post(
|
||||
endpoint="appStoreVersionSubmissions",
|
||||
data={
|
||||
"data": {
|
||||
"type": "appStoreVersionSubmissions",
|
||||
"relationships": {
|
||||
"appStoreVersion": {"data": {"type": "appStoreVersions", "id": version_id}}
|
||||
},
|
||||
}
|
||||
},
|
||||
data_type=None,
|
||||
)
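A short, hypothetical sketch tying the version workflow together, again assuming an existing HttpClient named http_client; the app ID, version string, and build ID are placeholders.

import logging

versions = VersionClient(http_client=http_client, log=logging.getLogger("asconnect"))

version = versions.get_version(app_id="1234567890", version_string="1.2.3")

if version:
    # Attach the build, then submit the version for App Store review
    versions.set_build(version_id=version.identifier, build_id="BUILD_ID")
    versions.submit_for_review(version_id=version.identifier)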
|
|
@ -0,0 +1,59 @@
|
|||
|
||||
jobs:
|
||||
|
||||
- job: 'Test'
|
||||
pool: 'Hosted macOS'
|
||||
strategy:
|
||||
matrix:
|
||||
Python36:
|
||||
python.version: '3.6'
|
||||
Python37:
|
||||
python.version: '3.7'
|
||||
Python38:
|
||||
python.version: '3.8'
|
||||
maxParallel: 4
|
||||
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '$(python.version)'
|
||||
architecture: 'x64'
|
||||
|
||||
- script: curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | python
|
||||
displayName: Install Poetry
|
||||
|
||||
- script: python -m venv $(System.DefaultWorkingDirectory)
|
||||
displayName: Create virtual environment
|
||||
|
||||
- script: |
|
||||
source bin/activate
|
||||
$HOME/.poetry/bin/poetry install
|
||||
displayName: 'Install dependencies'
|
||||
|
||||
- script: |
|
||||
source bin/activate
|
||||
python -m pylint --rcfile=pylintrc asconnect
|
||||
python -m pylint --rcfile=pylintrc tests
|
||||
displayName: 'Lint'
|
||||
|
||||
- script: |
|
||||
source bin/activate
|
||||
python -m mypy --ignore-missing-imports asconnect/
|
||||
python -m mypy --ignore-missing-imports tests/
|
||||
displayName: 'Type Check'
|
||||
|
||||
- script: |
|
||||
source bin/activate
|
||||
python -m pytest tests --cov=asconnect --cov-report html --cov-report xml --doctest-modules --junitxml=junit/test-results.xml
|
||||
displayName: 'pytest'
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: '**/test-results.xml'
|
||||
testRunTitle: 'Python $(python.version)'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: 'cobertura'
|
||||
summaryFileLocation: $(System.DefaultWorkingDirectory)/coverage.xml
|
|
@ -0,0 +1,793 @@
|
|||
[[package]]
|
||||
category = "dev"
|
||||
description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
|
||||
name = "appdirs"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.4.4"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "An abstract syntax tree for Python with inference support."
|
||||
name = "astroid"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "2.4.2"
|
||||
|
||||
[package.dependencies]
|
||||
lazy-object-proxy = ">=1.4.0,<1.5.0"
|
||||
six = ">=1.12,<2.0"
|
||||
wrapt = ">=1.11,<2.0"
|
||||
|
||||
[package.dependencies.typed-ast]
|
||||
python = "<3.8"
|
||||
version = ">=1.4.0,<1.5"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Atomic file writes."
|
||||
marker = "sys_platform == \"win32\""
|
||||
name = "atomicwrites"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "1.4.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Classes Without Boilerplate"
|
||||
name = "attrs"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "19.3.0"
|
||||
|
||||
[package.extras]
|
||||
azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
|
||||
dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
|
||||
docs = ["sphinx", "zope.interface"]
|
||||
tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "The uncompromising code formatter."
|
||||
name = "black"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
version = "19.10b0"
|
||||
|
||||
[package.dependencies]
|
||||
appdirs = "*"
|
||||
attrs = ">=18.1.0"
|
||||
click = ">=6.5"
|
||||
pathspec = ">=0.6,<1"
|
||||
regex = "*"
|
||||
toml = ">=0.9.4"
|
||||
typed-ast = ">=1.4.0"
|
||||
|
||||
[package.extras]
|
||||
d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
name = "certifi"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "2020.6.20"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
name = "cffi"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.14.0"
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Universal encoding detector for Python 2 and 3"
|
||||
name = "chardet"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "3.0.4"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Composable command line interface toolkit"
|
||||
name = "click"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "7.1.2"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Cross-platform colored terminal text."
|
||||
marker = "sys_platform == \"win32\""
|
||||
name = "colorama"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "0.4.3"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Code coverage measurement for Python"
|
||||
name = "coverage"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||
version = "5.2.1"
|
||||
|
||||
[package.extras]
|
||||
toml = ["toml"]
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
|
||||
name = "cryptography"
|
||||
optional = false
|
||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
|
||||
version = "3.0"
|
||||
|
||||
[package.dependencies]
|
||||
cffi = ">=1.8,<1.11.3 || >1.11.3"
|
||||
six = ">=1.4.1"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=1.6.5,<1.8.0 || >1.8.0,<3.1.0 || >3.1.0,<3.1.1 || >3.1.1)", "sphinx-rtd-theme"]
|
||||
docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
|
||||
idna = ["idna (>=2.1)"]
|
||||
pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
|
||||
ssh = ["bcrypt (>=3.1.5)"]
|
||||
test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "A library to make deserialization easy."
|
||||
name = "deserialize"
|
||||
optional = false
|
||||
python-versions = ">=3.6,<4.0"
|
||||
version = "1.8.0"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
name = "idna"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "2.10"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Read metadata from Python packages"
|
||||
marker = "python_version < \"3.8\""
|
||||
name = "importlib-metadata"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
|
||||
version = "1.7.0"
|
||||
|
||||
[package.dependencies]
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx", "rst.linker"]
|
||||
testing = ["packaging", "pep517", "importlib-resources (>=1.3)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "A Python utility / library to sort Python imports."
|
||||
name = "isort"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "4.3.21"
|
||||
|
||||
[package.extras]
|
||||
pipfile = ["pipreqs", "requirementslib"]
|
||||
pyproject = ["toml"]
|
||||
requirements = ["pipreqs", "pip-api"]
|
||||
xdg_home = ["appdirs (>=1.4.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "A fast and thorough lazy object proxy."
|
||||
name = "lazy-object-proxy"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "1.4.3"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "McCabe checker, plugin for flake8"
|
||||
name = "mccabe"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.6.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "More routines for operating on iterables, beyond itertools"
|
||||
name = "more-itertools"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "8.4.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Optional static typing for Python"
|
||||
name = "mypy"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "0.782"
|
||||
|
||||
[package.dependencies]
|
||||
mypy-extensions = ">=0.4.3,<0.5.0"
|
||||
typed-ast = ">=1.4.0,<1.5.0"
|
||||
typing-extensions = ">=3.7.4"
|
||||
|
||||
[package.extras]
|
||||
dmypy = ["psutil (>=4.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Experimental type system extensions for programs checked with the mypy typechecker."
|
||||
name = "mypy-extensions"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.4.3"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Core utilities for Python packages"
|
||||
name = "packaging"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "20.4"
|
||||
|
||||
[package.dependencies]
|
||||
pyparsing = ">=2.0.2"
|
||||
six = "*"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Utility library for gitignore style pattern matching of file paths."
|
||||
name = "pathspec"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "0.8.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
name = "pluggy"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "0.13.1"
|
||||
|
||||
[package.dependencies]
|
||||
[package.dependencies.importlib-metadata]
|
||||
python = "<3.8"
|
||||
version = ">=0.12"
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "tox"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
||||
name = "py"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "1.9.0"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "C parser in Python"
|
||||
name = "pycparser"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
version = "2.20"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "JSON Web Token implementation in Python"
|
||||
name = "pyjwt"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.7.1"
|
||||
|
||||
[package.extras]
|
||||
crypto = ["cryptography (>=1.4)"]
|
||||
flake8 = ["flake8", "flake8-import-order", "pep8-naming"]
|
||||
test = ["pytest (>=4.0.1,<5.0.0)", "pytest-cov (>=2.6.0,<3.0.0)", "pytest-runner (>=4.2,<5.0.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "python code static checker"
|
||||
name = "pylint"
|
||||
optional = false
|
||||
python-versions = ">=3.5.*"
|
||||
version = "2.5.3"
|
||||
|
||||
[package.dependencies]
|
||||
astroid = ">=2.4.0,<=2.5"
|
||||
colorama = "*"
|
||||
isort = ">=4.2.5,<5"
|
||||
mccabe = ">=0.6,<0.7"
|
||||
toml = ">=0.7.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Python parsing module"
|
||||
name = "pyparsing"
|
||||
optional = false
|
||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
version = "2.4.7"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
name = "pytest"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
version = "5.4.3"
|
||||
|
||||
[package.dependencies]
|
||||
atomicwrites = ">=1.0"
|
||||
attrs = ">=17.4.0"
|
||||
colorama = "*"
|
||||
more-itertools = ">=4.0.0"
|
||||
packaging = "*"
|
||||
pluggy = ">=0.12,<1.0"
|
||||
py = ">=1.5.0"
|
||||
wcwidth = "*"
|
||||
|
||||
[package.dependencies.importlib-metadata]
|
||||
python = "<3.8"
|
||||
version = ">=0.12"
|
||||
|
||||
[package.extras]
|
||||
checkqa-mypy = ["mypy (v0.761)"]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Pytest plugin for measuring coverage."
|
||||
name = "pytest-cov"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "2.10.0"
|
||||
|
||||
[package.dependencies]
|
||||
coverage = ">=4.4"
|
||||
pytest = ">=4.6"
|
||||
|
||||
[package.extras]
|
||||
testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Manage dependencies of tests"
|
||||
name = "pytest-dependency"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.5.1"
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=3.6.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Alternative regular expression module, to replace re."
|
||||
name = "regex"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "2020.7.14"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Python HTTP for Humans."
|
||||
name = "requests"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
||||
version = "2.24.0"
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
chardet = ">=3.0.2,<4"
|
||||
idna = ">=2.5,<3"
|
||||
urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
|
||||
|
||||
[package.extras]
|
||||
security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
|
||||
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
name = "six"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
version = "1.15.0"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Python Library for Tom's Obvious, Minimal Language"
|
||||
name = "toml"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.10.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "a fork of Python 2 and 3 ast modules with type comment support"
|
||||
name = "typed-ast"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.4.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Backported and Experimental Type Hints for Python 3.5+"
|
||||
name = "typing-extensions"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "3.7.4.2"
|
||||
|
||||
[[package]]
|
||||
category = "main"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
name = "urllib3"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
|
||||
version = "1.25.10"
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotlipy (>=0.6.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"]
|
||||
socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Measures the displayed width of unicode strings in a terminal"
|
||||
name = "wcwidth"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "0.2.5"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Module for decorators, wrappers and monkey patching."
|
||||
name = "wrapt"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
version = "1.12.1"
|
||||
|
||||
[[package]]
|
||||
category = "dev"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
marker = "python_version < \"3.8\""
|
||||
name = "zipp"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
version = "3.1.0"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
|
||||
testing = ["jaraco.itertools", "func-timeout"]
|
||||
|
||||
[metadata]
|
||||
content-hash = "e1e51142d0a3e439640d5bacb9dbbc32f3edd217b2d6c278a75a683d409c5fc8"
|
||||
python-versions = "^3.6"
|
||||
|
||||
[metadata.files]
|
||||
appdirs = [
|
||||
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
|
||||
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
|
||||
]
|
||||
astroid = [
|
||||
{file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"},
|
||||
{file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"},
|
||||
]
|
||||
atomicwrites = [
|
||||
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
|
||||
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
|
||||
]
|
||||
attrs = [
|
||||
{file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
|
||||
{file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
|
||||
]
|
||||
black = [
|
||||
{file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"},
|
||||
{file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"},
|
||||
]
|
||||
certifi = [
|
||||
{file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"},
|
||||
{file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"},
|
||||
]
|
||||
cffi = [
|
||||
{file = "cffi-1.14.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1cae98a7054b5c9391eb3249b86e0e99ab1e02bb0cc0575da191aedadbdf4384"},
|
||||
{file = "cffi-1.14.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:cf16e3cf6c0a5fdd9bc10c21687e19d29ad1fe863372b5543deaec1039581a30"},
|
||||
{file = "cffi-1.14.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f2b0fa0c01d8a0c7483afd9f31d7ecf2d71760ca24499c8697aeb5ca37dc090c"},
|
||||
{file = "cffi-1.14.0-cp27-cp27m-win32.whl", hash = "sha256:99f748a7e71ff382613b4e1acc0ac83bf7ad167fb3802e35e90d9763daba4d78"},
|
||||
{file = "cffi-1.14.0-cp27-cp27m-win_amd64.whl", hash = "sha256:c420917b188a5582a56d8b93bdd8e0f6eca08c84ff623a4c16e809152cd35793"},
|
||||
{file = "cffi-1.14.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:399aed636c7d3749bbed55bc907c3288cb43c65c4389964ad5ff849b6370603e"},
|
||||
{file = "cffi-1.14.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cab50b8c2250b46fe738c77dbd25ce017d5e6fb35d3407606e7a4180656a5a6a"},
|
||||
{file = "cffi-1.14.0-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:001bf3242a1bb04d985d63e138230802c6c8d4db3668fb545fb5005ddf5bb5ff"},
|
||||
{file = "cffi-1.14.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:e56c744aa6ff427a607763346e4170629caf7e48ead6921745986db3692f987f"},
|
||||
{file = "cffi-1.14.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b8c78301cefcf5fd914aad35d3c04c2b21ce8629b5e4f4e45ae6812e461910fa"},
|
||||
{file = "cffi-1.14.0-cp35-cp35m-win32.whl", hash = "sha256:8c0ffc886aea5df6a1762d0019e9cb05f825d0eec1f520c51be9d198701daee5"},
|
||||
{file = "cffi-1.14.0-cp35-cp35m-win_amd64.whl", hash = "sha256:8a6c688fefb4e1cd56feb6c511984a6c4f7ec7d2a1ff31a10254f3c817054ae4"},
|
||||
{file = "cffi-1.14.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:95cd16d3dee553f882540c1ffe331d085c9e629499ceadfbda4d4fde635f4b7d"},
|
||||
{file = "cffi-1.14.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:66e41db66b47d0d8672d8ed2708ba91b2f2524ece3dee48b5dfb36be8c2f21dc"},
|
||||
{file = "cffi-1.14.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:028a579fc9aed3af38f4892bdcc7390508adabc30c6af4a6e4f611b0c680e6ac"},
|
||||
{file = "cffi-1.14.0-cp36-cp36m-win32.whl", hash = "sha256:cef128cb4d5e0b3493f058f10ce32365972c554572ff821e175dbc6f8ff6924f"},
|
||||
{file = "cffi-1.14.0-cp36-cp36m-win_amd64.whl", hash = "sha256:337d448e5a725bba2d8293c48d9353fc68d0e9e4088d62a9571def317797522b"},
|
||||
{file = "cffi-1.14.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e577934fc5f8779c554639376beeaa5657d54349096ef24abe8c74c5d9c117c3"},
|
||||
{file = "cffi-1.14.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:62ae9af2d069ea2698bf536dcfe1e4eed9090211dbaafeeedf5cb6c41b352f66"},
|
||||
{file = "cffi-1.14.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:14491a910663bf9f13ddf2bc8f60562d6bc5315c1f09c704937ef17293fb85b0"},
|
||||
{file = "cffi-1.14.0-cp37-cp37m-win32.whl", hash = "sha256:c43866529f2f06fe0edc6246eb4faa34f03fe88b64a0a9a942561c8e22f4b71f"},
|
||||
{file = "cffi-1.14.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2089ed025da3919d2e75a4d963d008330c96751127dd6f73c8dc0c65041b4c26"},
|
||||
{file = "cffi-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b911c2dbd4f423b4c4fcca138cadde747abdb20d196c4a48708b8a2d32b16dd"},
|
||||
{file = "cffi-1.14.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:7e63cbcf2429a8dbfe48dcc2322d5f2220b77b2e17b7ba023d6166d84655da55"},
|
||||
{file = "cffi-1.14.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3d311bcc4a41408cf5854f06ef2c5cab88f9fded37a3b95936c9879c1640d4c2"},
|
||||
{file = "cffi-1.14.0-cp38-cp38-win32.whl", hash = "sha256:675686925a9fb403edba0114db74e741d8181683dcf216be697d208857e04ca8"},
|
||||
{file = "cffi-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:00789914be39dffba161cfc5be31b55775de5ba2235fe49aa28c148236c4e06b"},
|
||||
{file = "cffi-1.14.0.tar.gz", hash = "sha256:2d384f4a127a15ba701207f7639d94106693b6cd64173d6c8988e2c25f3ac2b6"},
|
||||
]
|
||||
chardet = [
|
||||
{file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"},
|
||||
{file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"},
|
||||
]
|
||||
click = [
|
||||
{file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
|
||||
{file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
|
||||
]
|
||||
colorama = [
|
||||
{file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
|
||||
{file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
|
||||
]
|
||||
coverage = [
|
||||
{file = "coverage-5.2.1-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:40f70f81be4d34f8d491e55936904db5c527b0711b2a46513641a5729783c2e4"},
|
||||
{file = "coverage-5.2.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:675192fca634f0df69af3493a48224f211f8db4e84452b08d5fcebb9167adb01"},
|
||||
{file = "coverage-5.2.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2fcc8b58953d74d199a1a4d633df8146f0ac36c4e720b4a1997e9b6327af43a8"},
|
||||
{file = "coverage-5.2.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:64c4f340338c68c463f1b56e3f2f0423f7b17ba6c3febae80b81f0e093077f59"},
|
||||
{file = "coverage-5.2.1-cp27-cp27m-win32.whl", hash = "sha256:52f185ffd3291196dc1aae506b42e178a592b0b60a8610b108e6ad892cfc1bb3"},
|
||||
{file = "coverage-5.2.1-cp27-cp27m-win_amd64.whl", hash = "sha256:30bc103587e0d3df9e52cd9da1dd915265a22fad0b72afe54daf840c984b564f"},
|
||||
{file = "coverage-5.2.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:9ea749fd447ce7fb1ac71f7616371f04054d969d412d37611716721931e36efd"},
|
||||
{file = "coverage-5.2.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ce7866f29d3025b5b34c2e944e66ebef0d92e4a4f2463f7266daa03a1332a651"},
|
||||
{file = "coverage-5.2.1-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:4869ab1c1ed33953bb2433ce7b894a28d724b7aa76c19b11e2878034a4e4680b"},
|
||||
{file = "coverage-5.2.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a3ee9c793ffefe2944d3a2bd928a0e436cd0ac2d9e3723152d6fd5398838ce7d"},
|
||||
{file = "coverage-5.2.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28f42dc5172ebdc32622a2c3f7ead1b836cdbf253569ae5673f499e35db0bac3"},
|
||||
{file = "coverage-5.2.1-cp35-cp35m-win32.whl", hash = "sha256:e26c993bd4b220429d4ec8c1468eca445a4064a61c74ca08da7429af9bc53bb0"},
|
||||
{file = "coverage-5.2.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4186fc95c9febeab5681bc3248553d5ec8c2999b8424d4fc3a39c9cba5796962"},
|
||||
{file = "coverage-5.2.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:b360d8fd88d2bad01cb953d81fd2edd4be539df7bfec41e8753fe9f4456a5082"},
|
||||
{file = "coverage-5.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:1adb6be0dcef0cf9434619d3b892772fdb48e793300f9d762e480e043bd8e716"},
|
||||
{file = "coverage-5.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:098a703d913be6fbd146a8c50cc76513d726b022d170e5e98dc56d958fd592fb"},
|
||||
{file = "coverage-5.2.1-cp36-cp36m-win32.whl", hash = "sha256:962c44070c281d86398aeb8f64e1bf37816a4dfc6f4c0f114756b14fc575621d"},
|
||||
{file = "coverage-5.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1ed2bdb27b4c9fc87058a1cb751c4df8752002143ed393899edb82b131e0546"},
|
||||
{file = "coverage-5.2.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:c890728a93fffd0407d7d37c1e6083ff3f9f211c83b4316fae3778417eab9811"},
|
||||
{file = "coverage-5.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:538f2fd5eb64366f37c97fdb3077d665fa946d2b6d95447622292f38407f9258"},
|
||||
{file = "coverage-5.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:27ca5a2bc04d68f0776f2cdcb8bbd508bbe430a7bf9c02315cd05fb1d86d0034"},
|
||||
{file = "coverage-5.2.1-cp37-cp37m-win32.whl", hash = "sha256:aab75d99f3f2874733946a7648ce87a50019eb90baef931698f96b76b6769a46"},
|
||||
{file = "coverage-5.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c2ff24df02a125b7b346c4c9078c8936da06964cc2d276292c357d64378158f8"},
|
||||
{file = "coverage-5.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:304fbe451698373dc6653772c72c5d5e883a4aadaf20343592a7abb2e643dae0"},
|
||||
{file = "coverage-5.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c96472b8ca5dc135fb0aa62f79b033f02aa434fb03a8b190600a5ae4102df1fd"},
|
||||
{file = "coverage-5.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8505e614c983834239f865da2dd336dcf9d72776b951d5dfa5ac36b987726e1b"},
|
||||
{file = "coverage-5.2.1-cp38-cp38-win32.whl", hash = "sha256:700997b77cfab016533b3e7dbc03b71d33ee4df1d79f2463a318ca0263fc29dd"},
|
||||
{file = "coverage-5.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:46794c815e56f1431c66d81943fa90721bb858375fb36e5903697d5eef88627d"},
|
||||
{file = "coverage-5.2.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:16042dc7f8e632e0dcd5206a5095ebd18cb1d005f4c89694f7f8aafd96dd43a3"},
|
||||
{file = "coverage-5.2.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c1bbb628ed5192124889b51204de27c575b3ffc05a5a91307e7640eff1d48da4"},
|
||||
{file = "coverage-5.2.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4f6428b55d2916a69f8d6453e48a505c07b2245653b0aa9f0dee38785939f5e4"},
|
||||
{file = "coverage-5.2.1-cp39-cp39-win32.whl", hash = "sha256:9e536783a5acee79a9b308be97d3952b662748c4037b6a24cbb339dc7ed8eb89"},
|
||||
{file = "coverage-5.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:b8f58c7db64d8f27078cbf2a4391af6aa4e4767cc08b37555c4ae064b8558d9b"},
|
||||
{file = "coverage-5.2.1.tar.gz", hash = "sha256:a34cb28e0747ea15e82d13e14de606747e9e484fb28d63c999483f5d5188e89b"},
|
||||
]
|
||||
cryptography = [
|
||||
{file = "cryptography-3.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ab49edd5bea8d8b39a44b3db618e4783ef84c19c8b47286bf05dfdb3efb01c83"},
|
||||
{file = "cryptography-3.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:124af7255ffc8e964d9ff26971b3a6153e1a8a220b9a685dc407976ecb27a06a"},
|
||||
{file = "cryptography-3.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:51e40123083d2f946794f9fe4adeeee2922b581fa3602128ce85ff813d85b81f"},
|
||||
{file = "cryptography-3.0-cp27-cp27m-win32.whl", hash = "sha256:dea0ba7fe6f9461d244679efa968d215ea1f989b9c1957d7f10c21e5c7c09ad6"},
|
||||
{file = "cryptography-3.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8ecf9400d0893836ff41b6f977a33972145a855b6efeb605b49ee273c5e6469f"},
|
||||
{file = "cryptography-3.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c608ff4d4adad9e39b5057de43657515c7da1ccb1807c3a27d4cf31fc923b4b"},
|
||||
{file = "cryptography-3.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bec7568c6970b865f2bcebbe84d547c52bb2abadf74cefce396ba07571109c67"},
|
||||
{file = "cryptography-3.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:0cbfed8ea74631fe4de00630f4bb592dad564d57f73150d6f6796a24e76c76cd"},
|
||||
{file = "cryptography-3.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:a09fd9c1cca9a46b6ad4bea0a1f86ab1de3c0c932364dbcf9a6c2a5eeb44fa77"},
|
||||
{file = "cryptography-3.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:ce82cc06588e5cbc2a7df3c8a9c778f2cb722f56835a23a68b5a7264726bb00c"},
|
||||
{file = "cryptography-3.0-cp35-cp35m-win32.whl", hash = "sha256:9367d00e14dee8d02134c6c9524bb4bd39d4c162456343d07191e2a0b5ec8b3b"},
|
||||
{file = "cryptography-3.0-cp35-cp35m-win_amd64.whl", hash = "sha256:384d7c681b1ab904fff3400a6909261cae1d0939cc483a68bdedab282fb89a07"},
|
||||
{file = "cryptography-3.0-cp36-cp36m-win32.whl", hash = "sha256:4d355f2aee4a29063c10164b032d9fa8a82e2c30768737a2fd56d256146ad559"},
|
||||
{file = "cryptography-3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:45741f5499150593178fc98d2c1a9c6722df88b99c821ad6ae298eff0ba1ae71"},
|
||||
{file = "cryptography-3.0-cp37-cp37m-win32.whl", hash = "sha256:8ecef21ac982aa78309bb6f092d1677812927e8b5ef204a10c326fc29f1367e2"},
|
||||
{file = "cryptography-3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4b9303507254ccb1181d1803a2080a798910ba89b1a3c9f53639885c90f7a756"},
|
||||
{file = "cryptography-3.0-cp38-cp38-win32.whl", hash = "sha256:8713ddb888119b0d2a1462357d5946b8911be01ddbf31451e1d07eaa5077a261"},
|
||||
{file = "cryptography-3.0-cp38-cp38-win_amd64.whl", hash = "sha256:bea0b0468f89cdea625bb3f692cd7a4222d80a6bdafd6fb923963f2b9da0e15f"},
|
||||
{file = "cryptography-3.0.tar.gz", hash = "sha256:8e924dbc025206e97756e8903039662aa58aa9ba357d8e1d8fc29e3092322053"},
|
||||
]
|
||||
deserialize = [
|
||||
{file = "deserialize-1.8.0-py3-none-any.whl", hash = "sha256:a238146aef832b546fe3ec3025c27724be124d734fccfa884235df5165c177ee"},
|
||||
{file = "deserialize-1.8.0.tar.gz", hash = "sha256:a093bd86bf73c5fca8bb73f0e03fc9c06dfe1a14e9b8b34dbfc6c82b25ae52a1"},
|
||||
]
|
||||
idna = [
|
||||
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
|
||||
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
|
||||
]
|
||||
importlib-metadata = [
|
||||
{file = "importlib_metadata-1.7.0-py2.py3-none-any.whl", hash = "sha256:dc15b2969b4ce36305c51eebe62d418ac7791e9a157911d58bfb1f9ccd8e2070"},
|
||||
{file = "importlib_metadata-1.7.0.tar.gz", hash = "sha256:90bb658cdbbf6d1735b6341ce708fc7024a3e14e99ffdc5783edea9f9b077f83"},
|
||||
]
|
||||
isort = [
|
||||
{file = "isort-4.3.21-py2.py3-none-any.whl", hash = "sha256:6e811fcb295968434526407adb8796944f1988c5b65e8139058f2014cbe100fd"},
|
||||
{file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"},
|
||||
]
|
||||
lazy-object-proxy = [
|
||||
{file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"},
|
||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"},
|
||||
]
|
||||
mccabe = [
|
||||
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
|
||||
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
|
||||
]
|
||||
more-itertools = [
|
||||
{file = "more-itertools-8.4.0.tar.gz", hash = "sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5"},
|
||||
{file = "more_itertools-8.4.0-py3-none-any.whl", hash = "sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"},
|
||||
]
|
||||
mypy = [
|
||||
{file = "mypy-0.782-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:2c6cde8aa3426c1682d35190b59b71f661237d74b053822ea3d748e2c9578a7c"},
|
||||
{file = "mypy-0.782-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9c7a9a7ceb2871ba4bac1cf7217a7dd9ccd44c27c2950edbc6dc08530f32ad4e"},
|
||||
{file = "mypy-0.782-cp35-cp35m-win_amd64.whl", hash = "sha256:c05b9e4fb1d8a41d41dec8786c94f3b95d3c5f528298d769eb8e73d293abc48d"},
|
||||
{file = "mypy-0.782-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:6731603dfe0ce4352c555c6284c6db0dc935b685e9ce2e4cf220abe1e14386fd"},
|
||||
{file = "mypy-0.782-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:f05644db6779387ccdb468cc47a44b4356fc2ffa9287135d05b70a98dc83b89a"},
|
||||
{file = "mypy-0.782-cp36-cp36m-win_amd64.whl", hash = "sha256:b7fbfabdbcc78c4f6fc4712544b9b0d6bf171069c6e0e3cb82440dd10ced3406"},
|
||||
{file = "mypy-0.782-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:3fdda71c067d3ddfb21da4b80e2686b71e9e5c72cca65fa216d207a358827f86"},
|
||||
{file = "mypy-0.782-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7df6eddb6054d21ca4d3c6249cae5578cb4602951fd2b6ee2f5510ffb098707"},
|
||||
{file = "mypy-0.782-cp37-cp37m-win_amd64.whl", hash = "sha256:a4a2cbcfc4cbf45cd126f531dedda8485671545b43107ded25ce952aac6fb308"},
|
||||
{file = "mypy-0.782-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6bb93479caa6619d21d6e7160c552c1193f6952f0668cdda2f851156e85186fc"},
|
||||
{file = "mypy-0.782-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:81c7908b94239c4010e16642c9102bfc958ab14e36048fa77d0be3289dda76ea"},
|
||||
{file = "mypy-0.782-cp38-cp38-win_amd64.whl", hash = "sha256:5dd13ff1f2a97f94540fd37a49e5d255950ebcdf446fb597463a40d0df3fac8b"},
|
||||
{file = "mypy-0.782-py3-none-any.whl", hash = "sha256:e0b61738ab504e656d1fe4ff0c0601387a5489ca122d55390ade31f9ca0e252d"},
|
||||
{file = "mypy-0.782.tar.gz", hash = "sha256:eff7d4a85e9eea55afa34888dfeaccde99e7520b51f867ac28a48492c0b1130c"},
|
||||
]
|
||||
mypy-extensions = [
|
||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
|
||||
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
|
||||
]
|
||||
packaging = [
|
||||
{file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
|
||||
{file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"},
|
||||
]
|
||||
pathspec = [
|
||||
{file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"},
|
||||
{file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"},
|
||||
]
|
||||
pluggy = [
|
||||
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
|
||||
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
|
||||
]
|
||||
py = [
|
||||
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
|
||||
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"},
|
||||
]
|
||||
pycparser = [
|
||||
{file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
|
||||
{file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
|
||||
]
|
||||
pyjwt = [
|
||||
{file = "PyJWT-1.7.1-py2.py3-none-any.whl", hash = "sha256:5c6eca3c2940464d106b99ba83b00c6add741c9becaec087fb7ccdefea71350e"},
|
||||
{file = "PyJWT-1.7.1.tar.gz", hash = "sha256:8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"},
|
||||
]
|
||||
pylint = [
|
||||
{file = "pylint-2.5.3-py3-none-any.whl", hash = "sha256:d0ece7d223fe422088b0e8f13fa0a1e8eb745ebffcb8ed53d3e95394b6101a1c"},
|
||||
{file = "pylint-2.5.3.tar.gz", hash = "sha256:7dd78437f2d8d019717dbf287772d0b2dbdfd13fc016aa7faa08d67bccc46adc"},
|
||||
]
|
||||
pyparsing = [
|
||||
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
|
||||
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
|
||||
]
|
||||
pytest = [
|
||||
{file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
|
||||
{file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
|
||||
]
|
||||
pytest-cov = [
|
||||
{file = "pytest-cov-2.10.0.tar.gz", hash = "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87"},
|
||||
{file = "pytest_cov-2.10.0-py2.py3-none-any.whl", hash = "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c"},
|
||||
]
|
||||
pytest-dependency = [
|
||||
{file = "pytest-dependency-0.5.1.tar.gz", hash = "sha256:c2a892906192663f85030a6ab91304e508e546cddfe557d692d61ec57a1d946b"},
|
||||
]
|
||||
regex = [
|
||||
{file = "regex-2020.7.14-cp27-cp27m-win32.whl", hash = "sha256:e46d13f38cfcbb79bfdb2964b0fe12561fe633caf964a77a5f8d4e45fe5d2ef7"},
|
||||
{file = "regex-2020.7.14-cp27-cp27m-win_amd64.whl", hash = "sha256:6961548bba529cac7c07af2fd4d527c5b91bb8fe18995fed6044ac22b3d14644"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c50a724d136ec10d920661f1442e4a8b010a4fe5aebd65e0c2241ea41dbe93dc"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8a51f2c6d1f884e98846a0a9021ff6861bdb98457879f412fdc2b42d14494067"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9c568495e35599625f7b999774e29e8d6b01a6fb684d77dee1f56d41b11b40cd"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:51178c738d559a2d1071ce0b0f56e57eb315bcf8f7d4cf127674b533e3101f88"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-win32.whl", hash = "sha256:9eddaafb3c48e0900690c1727fba226c4804b8e6127ea409689c3bb492d06de4"},
|
||||
{file = "regex-2020.7.14-cp36-cp36m-win_amd64.whl", hash = "sha256:14a53646369157baa0499513f96091eb70382eb50b2c82393d17d7ec81b7b85f"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1269fef3167bb52631ad4fa7dd27bf635d5a0790b8e6222065d42e91bede4162"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d0a5095d52b90ff38592bbdc2644f17c6d495762edf47d876049cfd2968fbccf"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c037fd14c5f4e308b8370b447b469ca10e69427966527edcab07f52d88388f7"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bc3d98f621898b4a9bc7fecc00513eec8f40b5b83913d74ccb445f037d58cd89"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-win32.whl", hash = "sha256:46bac5ca10fb748d6c55843a931855e2727a7a22584f302dd9bb1506e69f83f6"},
|
||||
{file = "regex-2020.7.14-cp37-cp37m-win_amd64.whl", hash = "sha256:0dc64ee3f33cd7899f79a8d788abfbec168410be356ed9bd30bbd3f0a23a7204"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5ea81ea3dbd6767873c611687141ec7b06ed8bab43f68fad5b7be184a920dc99"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bbb332d45b32df41200380fff14712cb6093b61bd142272a10b16778c418e98e"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c11d6033115dc4887c456565303f540c44197f4fc1a2bfb192224a301534888e"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:75aaa27aa521a182824d89e5ab0a1d16ca207318a6b65042b046053cfc8ed07a"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-win32.whl", hash = "sha256:d6cff2276e502b86a25fd10c2a96973fdb45c7a977dca2138d661417f3728341"},
|
||||
{file = "regex-2020.7.14-cp38-cp38-win_amd64.whl", hash = "sha256:7a2dd66d2d4df34fa82c9dc85657c5e019b87932019947faece7983f2089a840"},
|
||||
{file = "regex-2020.7.14.tar.gz", hash = "sha256:3a3af27a8d23143c49a3420efe5b3f8cf1a48c6fc8bc6856b03f638abc1833bb"},
|
||||
]
|
||||
requests = [
|
||||
{file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"},
|
||||
{file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"},
|
||||
]
|
||||
six = [
|
||||
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
|
||||
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
|
||||
]
|
||||
toml = [
|
||||
{file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"},
|
||||
{file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"},
|
||||
]
|
||||
typed-ast = [
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"},
|
||||
{file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"},
|
||||
{file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"},
|
||||
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"},
|
||||
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"},
|
||||
{file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"},
|
||||
{file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"},
|
||||
{file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"},
|
||||
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"},
|
||||
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"},
|
||||
{file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"},
|
||||
{file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"},
|
||||
{file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"},
|
||||
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"},
|
||||
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"},
|
||||
{file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"},
|
||||
{file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"},
|
||||
{file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"},
|
||||
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
|
||||
]
|
||||
typing-extensions = [
|
||||
{file = "typing_extensions-3.7.4.2-py2-none-any.whl", hash = "sha256:f8d2bd89d25bc39dabe7d23df520442fa1d8969b82544370e03d88b5a591c392"},
|
||||
{file = "typing_extensions-3.7.4.2-py3-none-any.whl", hash = "sha256:6e95524d8a547a91e08f404ae485bbb71962de46967e1b71a0cb89af24e761c5"},
|
||||
{file = "typing_extensions-3.7.4.2.tar.gz", hash = "sha256:79ee589a3caca649a9bfd2a8de4709837400dfa00b6cc81962a1e6a1815969ae"},
|
||||
]
|
||||
urllib3 = [
|
||||
{file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"},
|
||||
{file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"},
|
||||
]
|
||||
wcwidth = [
|
||||
{file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
|
||||
{file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
|
||||
]
|
||||
wrapt = [
|
||||
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
|
||||
]
|
||||
zipp = [
|
||||
{file = "zipp-3.1.0-py3-none-any.whl", hash = "sha256:aa36550ff0c0b7ef7fa639055d797116ee891440eac1a56f378e2d3179e0320b"},
|
||||
{file = "zipp-3.1.0.tar.gz", hash = "sha256:c599e4d75c98f6798c509911d08a22e6c021d074469042177c8c86fb92eefd96"},
|
||||
]
|
|
@ -0,0 +1,82 @@
|
|||
[MASTER]
|
||||
|
||||
# Use multiple processes to speed up Pylint.
|
||||
jobs=4
|
||||
|
||||
# Require full doc comments
|
||||
# Require the correct doc comment style
|
||||
# Avoid compare to empty string
|
||||
# Avoid overlapping exceptions
|
||||
# Limit cyclomatic complexity
|
||||
load-plugins=pylint.extensions.docparams,pylint.extensions.docstyle,pylint.extensions.emptystring,pylint.extensions.overlapping_exceptions,pylint.extensions.redefined_variable_type,pylint.extensions.mccabe
|
||||
|
||||
# pylint.extensions.docparams options
|
||||
accept-no-param-doc=no
|
||||
accept-no-raise-doc=no
|
||||
accept-no-return-doc=no
|
||||
accept-no-yields-doc=no
|
||||
|
||||
# pylint.extensions.mccabe options
|
||||
max-complexity=12
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
|
||||
# C0330 bad-continuation: Wrong hanging indentation before block
|
||||
# C0413 wrong-import-position: Import "%s" should be placed at the top of the module (used when code and imports are mixed)
|
||||
# C1801 len-as-condition: Do not use `len(SEQUENCE)` as condition value
|
||||
# W0511 fixme: TODO statements
|
||||
# W0703 broad-except: Catching too general exception "Exception"
|
||||
# W1201 logging-not-lazy: Specify string format arguments as logging function parameters
|
||||
# W1202 logging-format-interpolation: Use % formatting in logging functions and pass the % parameters as arguments
|
||||
# W1203 logging-fstring-interpolation: Use % formatting in logging functions rather than f-string interpolation
|
||||
disable=C0330,C0413,C1801,W0511,W0703,W1201,W1202,W1203
|
||||
|
||||
[REPORTS]
|
||||
|
||||
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||
# (visual studio) and html. You can also give a reporter class, e.g.
|
||||
# mypackage.mymodule.MyReporterClass.
|
||||
output-format=parseable
|
||||
|
||||
[BASIC]
|
||||
|
||||
# Good variable names which should always be accepted, separated by a comma
|
||||
good-names=f,i,j,k,ex,log,_,exposed,unit_test,T
|
||||
|
||||
# Include a hint for the correct naming format with invalid-name
|
||||
include-naming-hint=yes
|
||||
|
||||
# Methods and functions can have names up to 50 characters. We need this on
|
||||
# occasion for internal methods doing complex things.
|
||||
method-rgx=[a-z_][a-z0-9_]{2,50}$
|
||||
method-name-hint=[a-z_][a-z0-9_]{2,50}$
|
||||
function-rgx=[a-z_][a-z0-9_]{2,50}$
|
||||
function-name-hint=[a-z_][a-z0-9_]{2,50}$
|
||||
|
||||
# Increase from 7. 7 isn't enough to encapsulate everything we need for working
|
||||
# with various APIs.
|
||||
max-attributes=15
|
||||
|
||||
[DESIGN]
|
||||
|
||||
# Not everything needs to have methods. Python prefers named tuples over classes
|
||||
# but classes give more flexibility in the future.
|
||||
min-public-methods=0
|
||||
|
||||
[FORMAT]
|
||||
|
||||
# We should aim to stay below 120, but that isn't always possible, and forcing
|
||||
# it can make the code look uglier in a lot of cases.
|
||||
max-line-length=200
|
||||
|
||||
# 50 is a good number, but we need to break it on occasion. 75 is more than
|
||||
# adequate.
|
||||
max-statements=75
|
||||
|
||||
|
||||
# 5 is just too small, and I found I was frequently overriding to pass values
|
||||
# into constructors since there is no way to automatically filter those.
|
||||
max-args=8
|
||||
|
||||
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
|
||||
expected-line-ending-format=LF
|
|
@ -0,0 +1,49 @@
|
|||
[tool.poetry]
|
||||
name = "asconnect"
|
||||
version = "1.0.0"
|
||||
description = "A wrapper around the Apple App Store Connect APIs"
|
||||
|
||||
license = "MIT"
|
||||
|
||||
authors = [
|
||||
"Dale Myers <dalemy@microsoft.com>"
|
||||
]
|
||||
|
||||
readme = 'README.md'
|
||||
|
||||
repository = "https://office.visualstudio.com/Outlook%20Mobile/_git/asconnect"
|
||||
homepage = "https://office.visualstudio.com/Outlook%20Mobile/_git/asconnect"
|
||||
|
||||
keywords = ['apple', 'app store', 'itunes', 'connect']
|
||||
|
||||
classifiers = [
|
||||
'Development Status :: 3 - Alpha',
|
||||
'Environment :: Console',
|
||||
'Environment :: MacOS X',
|
||||
'Intended Audience :: Developers',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'Topic :: Software Development',
|
||||
'Topic :: Utilities'
|
||||
]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.6"
|
||||
cryptography = "^3.0.0"
|
||||
deserialize = "^1.2.0"
|
||||
pyjwt = "^1.7.0"
|
||||
requests = "^2.20.0"
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
black = "=19.10b0"
|
||||
mypy = "=0.782"
|
||||
pylint = "=2.5.3"
|
||||
pytest = "=5.4.3"
|
||||
pytest-cov = "=2.10.0"
|
||||
pytest-dependency = "=0.5.1"
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry>=0.12"]
|
||||
build-backend = "poetry.masonry.api"
|
|
@ -0,0 +1,6 @@
|
|||
#!/bin/bash
|
||||
|
||||
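# Format the code with black, then lint with pylint and type-check with mypy.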
python -m black --line-length 100 asconnect tests
|
||||
python -m pylint --rcfile=pylintrc asconnect tests
|
||||
python -m mypy --ignore-missing-imports asconnect/ tests/
|
||||
|
|
@ -0,0 +1 @@
|
|||
python -m pytest tests --cov=asconnect --cov-report html --cov-report xml --doctest-modules --junitxml=junit/test-results.xml
|
|
@ -0,0 +1,4 @@
|
|||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
"""Base test cases."""
|
|
@ -0,0 +1,36 @@
|
|||
"""Configuration for tests."""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
import _pytest
|
||||
|
||||
|
||||
def pytest_configure(config: _pytest.config.Config) -> None:
|
||||
"""Run configuration before tests.
|
||||
|
||||
:param config: The pytest config object
|
||||
"""
|
||||
_ = config
|
||||
|
||||
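# Resolve the repository root with git so the .env file can be located regardless of the current working directory.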
repo_root = subprocess.run(
|
||||
["git", "rev-parse", "--show-toplevel"],
|
||||
universal_newlines=True,
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
).stdout.strip()
|
||||
|
||||
env_file_path = os.path.join(repo_root, ".env")
|
||||
|
||||
if not os.path.exists(env_file_path):
|
||||
return
|
||||
|
||||
with open(env_file_path) as env_file:
|
||||
contents = env_file.read()
|
||||
|
||||
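# Each non-blank line of .env is assumed to be NAME=VALUE; export it into the environment for the tests.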
for line in contents.split("\n"):
|
||||
if len(line) == 0:
|
||||
continue
|
||||
name, value = line.split("=", 1)  # split only on the first "=" so values may contain it
|
||||
os.environ[name] = value
|
|
@ -0,0 +1,926 @@
|
|||
# Copyright (c) Microsoft Corporation.
|
||||
# Licensed under the MIT license.
|
||||
|
||||
"""Tests for the package."""
|
||||
|
||||
import datetime
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from typing import Optional, Tuple, Union
|
||||
|
||||
import jwt
|
||||
|
||||
sys.path.insert(0, os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..")))
|
||||
import asconnect # pylint: disable=wrong-import-order
|
||||
|
||||
|
||||
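# These values are left blank here; fill them in locally before running the integration tests against a real account.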
APP_ID = ""
|
||||
IPA_PATH = ""
|
||||
USERNAME = ""
|
||||
PASSWORD = ""
|
||||
|
||||
|
||||
def get_test_data() -> Tuple[str, str, str]:
|
||||
"""Get the test data.
|
||||
|
||||
:returns: The test data
|
||||
"""
|
||||
tests_path = os.path.dirname(os.path.abspath(__file__))
|
||||
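# test_data.json is expected to provide a "base" entry containing the key_id, key and issuer_id for App Store Connect.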
test_data_path = os.path.join(tests_path, "test_data.json")
|
||||
with open(test_data_path) as test_data_file:
|
||||
all_test_data = json.load(test_data_file)
|
||||
|
||||
test_data = all_test_data["base"]
|
||||
|
||||
return test_data["key_id"], test_data["key"], test_data["issuer_id"]
|
||||
|
||||
|
||||
def test_import() -> None:
|
||||
"""Test that importing the package works."""
|
||||
assert asconnect is not None
|
||||
|
||||
|
||||
def test_token() -> None:
|
||||
"""Test the JWT token generation"""
|
||||
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
|
||||
token = client.http_client.generate_token()
|
||||
|
||||
decoded = jwt.decode(token, verify=False)
|
||||
assert decoded["iss"] == issuer_id
|
||||
assert decoded["aud"] == "appstoreconnect-v1"
|
||||
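# The token's expiry should fall within the next 20 minutes, matching App Store Connect's maximum token lifetime.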
assert datetime.datetime.fromtimestamp(
|
||||
decoded["exp"]
|
||||
) < datetime.datetime.now() + datetime.timedelta(minutes=20)
|
||||
|
||||
# Ensure we return the cached version
|
||||
token2 = client.http_client.generate_token()
|
||||
assert token == token2
|
||||
|
||||
|
||||
def test_get_apps() -> None:
|
||||
"""Test get apps."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
apps = list(client.app.get_all())
|
||||
assert len(apps) != 0
|
||||
print(apps[0])
|
||||
|
||||
|
||||
def test_get_builds() -> None:
|
||||
"""Test get apps."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
builds = client.build.get_builds()
|
||||
assert builds is not None
|
||||
|
||||
|
||||
def test_get_builds_by_version() -> None:
|
||||
"""Test get build by version."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
assert app is not None
|
||||
builds = next(client.build.get_builds(app_id=app.identifier))
|
||||
assert builds is not None
|
||||
|
||||
|
||||
def test_get_app() -> None:
|
||||
"""Test that we can get an app."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
assert app is not None
|
||||
|
||||
|
||||
def test_upload() -> None:
|
||||
"""Test that we can upload a build."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
assert app is not None
|
||||
|
||||
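# Uploading the IPA needs Apple ID credentials (username/password) in addition to the API key used by the client.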
asconnect.upload_build(
|
||||
ipa_path=IPA_PATH,
|
||||
bundle_id=APP_ID,
|
||||
app_id=app.identifier,
|
||||
username=USERNAME,
|
||||
password=PASSWORD,
|
||||
)
|
||||
|
||||
|
||||
def test_wait_for_build() -> None:
|
||||
"""Test that we can wait for a build."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
assert app is not None
|
||||
|
||||
client.build.wait_for_build_to_process(APP_ID, "")
|
||||
|
||||
|
||||
def test_set_testflight_review_details() -> None:
|
||||
"""Test that we can set the testflight app review details."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
|
||||
assert app is not None
|
||||
|
||||
client.beta_review.set_beta_app_review_details(
|
||||
app_id=app.identifier,
|
||||
contact_email="j.doe@example.com",
|
||||
contact_first_name="John",
|
||||
contact_last_name="Doe",
|
||||
contact_phone="1-425-867-5309",
|
||||
demo_account_name="demo@example.com",
|
||||
demo_account_password="P@ssW0rd",
|
||||
demo_account_required=True,
|
||||
)
|
||||
|
||||
|
||||
def test_get_beta_app_localizations() -> None:
|
||||
"""Test get beta app localizations."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
|
||||
assert app is not None
|
||||
|
||||
client.beta_review.get_beta_app_localizations(app.identifier)
|
||||
|
||||
|
||||
def test_set_testflight_localized_review_details() -> None:
|
||||
"""Test that we can set the testflight app review details."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
app = client.app.get_from_bundle_id(APP_ID)
|
||||
|
||||
assert app is not None
|
||||
|
||||
info = {
|
||||
"en-US": {
|
||||
"feedbackEmail": "j.doe@example.com",
|
||||
"description": "Thanks for helping us test!",
|
||||
}
|
||||
}
|
||||
|
||||
client.beta_review.set_beta_app_localizations(app.identifier, info)
|
||||
|
||||
|
||||
def test_get_build_localization_details() -> None:
|
||||
"""Test that we can get a builds localization details."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
|
||||
build = client.build.get_from_build_number(build_number="", bundle_id=APP_ID)
|
||||
|
||||
assert build is not None
|
||||
|
||||
assert len(list(client.beta_review.get_beta_build_localizations(build.identifier))) > 0
|
||||
|
||||
|
||||
def test_set_whats_new() -> None:
|
||||
"""Test that we can get a builds localization details."""
|
||||
key_id, key_contents, issuer_id = get_test_data()
|
||||
|
||||
client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)
|
||||
|
||||
build = client.build.get_from_build_number(build_number="", bundle_id=APP_ID)
|
||||
|
||||
assert build is not None
|
||||
|
||||
client.beta_review.set_whats_new_for_build(
|
||||
build.identifier, {"en-US": "Bug fixes and performance improvements"},
|
||||
)
|
||||
|
||||
|
||||
def test_get_build_beta_detail() -> None:
    """Test that we can get a build's beta details."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    build = client.build.get_from_build_number(build_number="", bundle_id=APP_ID)

    assert build is not None

    build_detail = client.build.get_beta_detail(build)

    assert build_detail is not None


def test_get_beta_groups_detail() -> None:
    """Test that we can get an app's beta groups."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)

    assert app is not None

    groups = [
        group
        for group in client.beta_review.get_beta_groups(app.identifier)
        if group.attributes.name in ["External Testers", "Public Link Testers"]
    ]

    assert len(groups) == 2


def test_set_beta_groups_detail() -> None:
    """Test that we can set the beta groups on a build."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)

    assert app is not None

    build = client.build.get_from_build_number(build_number="", bundle_id=APP_ID)

    assert build is not None

    groups = [
        group
        for group in client.beta_review.get_beta_groups(app.identifier)
        if group.attributes.name in ["External Testers", "Public Link Testers"]
    ]

    client.beta_review.set_beta_groups_on_build(build.identifier, groups)


def test_beta_review_submission() -> None:
    """Test that we can submit a build for beta review."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    build = client.build.get_from_build_number(build_number="", bundle_id=APP_ID)

    assert build is not None

    client.beta_review.submit_for_beta_review(build.identifier)


def test_create_new_version() -> None:
    """Test that we can create a new app store version."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    client.app.create_new_version(version="1.2.3", app_id=app.identifier)


def test_get_versions() -> None:
    """Test that we can get app store versions."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    versions = list(client.version.get_all(app_id=app.identifier))
    assert len(versions) > 0


def test_get_version() -> None:
    """Test that we can get a specific app store version."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None


def test_get_version_localizations() -> None:
    """Test that we can get the localizations for an app store version."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    localizations = list(client.version.get_localizations(version_id=version.identifier))
    assert len(localizations) > 0


def test_get_screenshot_sets() -> None:
    """Test that we can get screenshot sets from an app store version."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    localizations = list(client.version.get_localizations(version_id=version.identifier))
    assert len(localizations) > 0

    en_us = [
        localization for localization in localizations if localization.attributes.locale == "en-US"
    ][0]

    screenshot_sets = list(client.screenshots.get_sets(localization_id=en_us.identifier))
    assert len(screenshot_sets) > 0


def test_get_screenshots() -> None:
    """Test that we can get the screenshots in a screenshot set."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    localizations = list(client.version.get_localizations(version_id=version.identifier))
    assert len(localizations) > 0

    en_us = [
        localization for localization in localizations if localization.attributes.locale == "en-US"
    ][0]

    screenshot_set = list(client.screenshots.get_sets(localization_id=en_us.identifier))[0]

    screenshots = list(
        client.screenshots.get_screenshots(screenshot_set_id=screenshot_set.identifier)
    )
    assert len(screenshots) > 0


def test_delete_screenshot_sets() -> None:
    """Test that we can delete screenshot sets from an app store version."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    for localization in client.version.get_localizations(version_id=version.identifier):
        client.screenshots.delete_all_sets_in_localization(localization_id=localization.identifier)

    assert version is not None


def test_create_screenshot_set() -> None:
    """Test that we can create a screenshot set."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    localizations = list(client.version.get_localizations(version_id=version.identifier))

    en_us = [
        localization for localization in localizations if localization.attributes.locale == "en-US"
    ][0]

    client.screenshots.create_set(
        localization_id=en_us.identifier,
        display_type=asconnect.models.ScreenshotDisplayType.app_iphone_65,
    )


def test_create_screenshot() -> None:
    """Test that we can upload a screenshot to a screenshot set."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    localizations = list(client.version.get_localizations(version_id=version.identifier))

    en_us = [
        localization for localization in localizations if localization.attributes.locale == "en-US"
    ][0]

    client.screenshots.delete_all_sets_in_localization(localization_id=en_us.identifier)

    screenshot_set = client.screenshots.create_set(
        localization_id=en_us.identifier,
        display_type=asconnect.models.ScreenshotDisplayType.app_iphone_65,
    )

    screenshot = client.screenshots.upload_screenshot(
        file_path="/path/to/screenshot.png", screenshot_set_id=screenshot_set.identifier,
    )

    print(screenshot)


class ScreenshotFile:
    """Represents a screenshot file."""

    filename: str
    path: str
    root_path: str
    prefix: str
    order: int
    description: str
    md5: str

    _PATTERN = re.compile(r"([^-]*)-0*([0-9]*)-?(.*)\.png")

    def __init__(self, filename: str, root_path: str) -> None:
        """Create a new instance.

        :param filename: The name of the file
        :param root_path: The path the file lives under
        """
        match = ScreenshotFile._PATTERN.match(filename)
        assert match
        self.filename = filename
        self.path = os.path.join(root_path, filename)
        self.root_path = root_path
        self.prefix = match.group(1)
        self.order = int(match.group(2))
        self.description = match.group(3) if match.group(3) else ""
        self.md5 = asconnect.utilities.md5_file(os.path.join(root_path, filename))


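# For illustration of the pattern above: a hypothetical file name such as
# "appIphone65-003-home screen.png" would parse as prefix="appIphone65", order=3,
# description="home screen". The prefix is what later gets mapped to a display type
# via asconnect.models.ScreenshotDisplayType.from_name, so it needs to match a
# display type name that asconnect understands.

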
# pylint: disable=too-many-locals
def upload_screenshots_for_localization(
    localization: asconnect.models.AppStoreVersionLocalization,
    root_screenshots_path: str,
    client: asconnect.Client,
) -> None:
    """Upload the screenshots for a localization.

    :param localization: The localization to upload the screenshots for
    :param root_screenshots_path: The root path the screenshots live at
    :param client: The API client

    :raises FileNotFoundError: If the screenshots path doesn't exist
    """
    screenshots_path = os.path.join(root_screenshots_path, localization.attributes.locale)

    if not os.path.exists(screenshots_path):
        raise FileNotFoundError(
            f"Could not find screenshots for locale {localization.attributes.locale} in {root_screenshots_path}"
        )

    screenshot_files = [
        ScreenshotFile(file_name, screenshots_path)
        for file_name in os.listdir(screenshots_path)
        if file_name.endswith(".png")
    ]

    prefixes = {file_name.prefix for file_name in screenshot_files}

    existing_sets = {
        screenshot_set.attributes.screenshot_display_type: screenshot_set
        for screenshot_set in client.screenshots.get_sets(localization_id=localization.identifier)
    }

    handled_displays = set()

    uploads = []

    for prefix in prefixes:
        display_type = asconnect.models.ScreenshotDisplayType.from_name(prefix)

        if display_type is None:
            # No display type matches this prefix, so skip it
            continue

        handled_displays.add(display_type)

        screenshots = [screenshot for screenshot in screenshot_files if screenshot.prefix == prefix]
        screenshots.sort(key=lambda screenshot: screenshot.order)

        existing_set = existing_sets.get(display_type)

        # If we have an existing one, check if the images are the same. If not, wipe it and re-upload
        if existing_set is not None:
            existing_screenshots = client.screenshots.get_screenshots(
                screenshot_set_id=existing_set.identifier
            )

            checksums = {screenshot.md5 for screenshot in screenshots}
            for existing_screenshot in existing_screenshots:
                if (
                    existing_screenshot.attributes.source_file_checksum is None
                    or existing_screenshot.attributes.source_file_checksum not in checksums
                ):
                    client.screenshots.delete_set(
                        screenshot_set_id=existing_set.identifier, delete_all_screenshots=True
                    )
                    break
            else:
                # All images were the same so continue
                continue

        uploads.append((localization.identifier, display_type, screenshots))

    unhandled_displays = set(existing_sets.keys()) - handled_displays

    for unhandled_display in unhandled_displays:
        existing_set = existing_sets.get(unhandled_display)
        if existing_set is None:
            continue
        client.screenshots.delete_set(
            screenshot_set_id=existing_set.identifier, delete_all_screenshots=True
        )

    for localization_id, display_type, screenshots in uploads:
        screenshot_set = client.screenshots.create_set(
            localization_id=localization_id, display_type=display_type
        )

        for screenshot in screenshots:
            client.screenshots.upload_screenshot(
                file_path=screenshot.path, screenshot_set_id=screenshot_set.identifier
            )


# pylint: enable=too-many-locals


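# For illustration: with root_screenshots_path="/path/to/screenshots" and an "en-US"
# localization, the function above looks for PNG files directly under
# /path/to/screenshots/en-US/, groups them by prefix, and uploads each group in the
# numeric order encoded in the (here hypothetical) file names, e.g.
#   /path/to/screenshots/en-US/appIphone65-01-home.png
#   /path/to/screenshots/en-US/appIphone65-02-detail.png

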
def test_upload_all_screenshots() -> None:
    """Test that we can upload all screenshots."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    root_screenshots_path = "/path/to/screenshots"

    for localization in client.version.get_localizations(version_id=version.identifier):
        upload_screenshots_for_localization(localization, root_screenshots_path, client)

    print("Done")


def test_get_app_info_localization() -> None:
    """Test that we can get app info localization."""
    key_id, key_contents, issuer_id = get_test_data()

    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version = client.version.get_version(app_id=app.identifier, version_string="1.2.3")
    assert version is not None

    app_infos = client.app_info.get_app_info(app_id=app.identifier)
    app_info = [
        app_info
        for app_info in app_infos
        if app_info.attributes.app_store_state
        != asconnect.models.AppStoreVersionState.ready_for_sale
    ][0]


def load_value(
    *,
    root_path: str,
    file_name: str,
    localized_info: Union[
        asconnect.models.AppInfoLocalization, asconnect.models.AppStoreVersionLocalization
    ],
    current_value: Optional[str],
    version: Optional[str] = None,
) -> Optional[str]:
    """Load an app value from the metadata files.

    This checks for values in the following order:

    1. Release specific, language specific
    2. Release specific, language default
    3. Release default, language specific
    4. Release default, language default

    If the determined value matches the current value then None will be returned
    in order to avoid an unnecessary patch call.

    The structure should be something like:

    - root
      - appstore
        - metadata
          - en-US
        - screenshots
          - en-US
      - releases
        - 1.2.3
          - appstore
            - metadata
              - en-US
            - screenshots
              - en-US
      - testflight
        - metadata
          - en-US

    :param root_path: The root path of the repo
    :param file_name: The name of the file to load the value from
    :param localized_info: The localized info this will be going into
    :param current_value: The current value on the app store
    :param version: The version of the app

    :returns: The most specific possible value
    """

    language_code = localized_info.attributes.locale
    paths_to_check = []

    if version:
        release_metadata_path = os.path.join(root_path, "releases", version, "appstore", "metadata")
        paths_to_check += [
            os.path.join(release_metadata_path, language_code, file_name),
            os.path.join(release_metadata_path, "default", file_name),
        ]

    general_metadata_path = os.path.join(root_path, "appstore", "metadata")

    paths_to_check += [
        os.path.join(general_metadata_path, language_code, file_name),
        os.path.join(general_metadata_path, "default", file_name),
    ]

    for path in paths_to_check:
        if os.path.exists(path):
            with open(path) as metadata_file:
                value = metadata_file.read().strip()

            if value:
                if value == current_value:
                    return None
                return value

    return None


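# For illustration: a hypothetical call such as
#   load_value(root_path="/path/to/metadata", file_name="description.txt",
#              localized_info=an_en_us_localization, current_value=None, version="1.2.3")
# would check these paths in order and return the first non-empty value it finds:
#   /path/to/metadata/releases/1.2.3/appstore/metadata/en-US/description.txt
#   /path/to/metadata/releases/1.2.3/appstore/metadata/default/description.txt
#   /path/to/metadata/appstore/metadata/en-US/description.txt
#   /path/to/metadata/appstore/metadata/default/description.txt

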
def update_localized_info(
    client: asconnect.Client, localized_info: asconnect.models.AppInfoLocalization
) -> asconnect.models.AppInfoLocalization:
    """Update the localized info on the app store with the latest from disk.

    This uses the same structure as Fastlane to make migration easier.

    :param client: The API client
    :param localized_info: The localized info to update

    :returns: The updated localized info
    """
    root_path = "/path/to/metadata"

    name = load_value(
        root_path=root_path,
        file_name="name.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.name,
    )

    privacy_text = load_value(
        root_path=root_path,
        file_name="privacy_text.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.privacy_policy_text,
    )

    privacy_url = load_value(
        root_path=root_path,
        file_name="privacy_url.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.privacy_policy_url,
    )

    subtitle = load_value(
        root_path=root_path,
        file_name="subtitle.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.subtitle,
    )

    return client.app_info.set_localization_properties(
        localization_id=localized_info.identifier,
        name=name,
        privacy_policy_text=privacy_text,
        privacy_policy_url=privacy_url,
        subtitle=subtitle,
    )


def set_localized_app_info(client: asconnect.Client, app_id: str) -> None:
    """Set the localized info for the app.

    :param client: The API client
    :param app_id: The ID of the app to update the info for
    """

    app_infos = client.app_info.get_app_info(app_id=app_id)
    app_info = [
        app_info
        for app_info in app_infos
        if app_info.attributes.app_store_state
        != asconnect.models.AppStoreVersionState.ready_for_sale
    ][0]

    for localized_info in client.app_info.get_localizations(app_info_id=app_info.identifier):
        update_localized_info(client, localized_info)


def update_localized_version_info(
    client: asconnect.Client,
    localized_info: asconnect.models.AppStoreVersionLocalization,
    version: str,
) -> asconnect.models.AppInfoLocalization:
    """Update the localized info for this version of the app.

    :param client: The API client
    :param localized_info: The localized info to be updated
    :param version: The current version of the app

    :returns: The updated app localization info
    """
    root_path = "/path/to/metadata"

    description = load_value(
        root_path=root_path,
        file_name="description.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.description,
        version=version,
    )

    keywords = load_value(
        root_path=root_path,
        file_name="keywords.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.keywords,
        version=version,
    )

    marketing_url = load_value(
        root_path=root_path,
        file_name="marketing_url.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.marketing_url,
        version=version,
    )

    promotional_text = load_value(
        root_path=root_path,
        file_name="promotional_text.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.promotional_text,
        version=version,
    )

    support_url = load_value(
        root_path=root_path,
        file_name="support_url.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.support_url,
    )

    whats_new = load_value(
        root_path=root_path,
        file_name="release_notes.txt",
        localized_info=localized_info,
        current_value=localized_info.attributes.whats_new,
        version=version,
    )

    return client.app_info.set_localization_version_properties(
        version_localization_id=localized_info.identifier,
        description=description,
        keywords=keywords,
        marketing_url=marketing_url,
        promotional_text=promotional_text,
        support_url=support_url,
        whats_new=whats_new,
    )


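# For illustration: taken together with update_localized_info above, a hypothetical
# metadata checkout for an en-US app at version 1.2.3 might contain files such as
#   /path/to/metadata/appstore/metadata/en-US/name.txt
#   /path/to/metadata/appstore/metadata/en-US/subtitle.txt
#   /path/to/metadata/releases/1.2.3/appstore/metadata/en-US/description.txt
#   /path/to/metadata/releases/1.2.3/appstore/metadata/en-US/release_notes.txt
# with load_value falling back to the "default" locale directory for anything missing.

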
def set_localized_version_info(client: asconnect.Client, version_id: str, version: str) -> None:
    """Set the localized version info.

    :param client: The API client
    :param version_id: The ID of the version to set the info on
    :param version: The current version of the app
    """

    for localized_info in client.version.get_localizations(version_id=version_id):
        update_localized_version_info(client, localized_info, version)

    print("Done")


def set_build(client: asconnect.Client, app_id: str, version_id: str, version: str) -> None:
    """Set the build to the latest.

    :param client: The API client
    :param app_id: The app to set the build on
    :param version_id: The ID of the version to set the build on
    :param version: The current version of the app
    """

    build = asconnect.utilities.next_or_none(
        client.build.get_builds(
            app_id=app_id, sort=asconnect.sorting.BuildsSort.UploadedDateReversed, version=version
        )
    )

    assert build is not None

    client.version.set_build(version_id=version_id, build_id=build.identifier)


def set_app_review_details(client: asconnect.Client, version_id: str) -> None:
    """Set the app review details for the version.

    :param client: The API client
    :param version_id: The version of the app to set the app review details for
    """
    contact_email = "j.doe@example.com"
    contact_first_name = "John"
    contact_last_name = "Doe"
    contact_phone = "1-425-867-5309"
    demo_account_name = "demo@example.com"
    demo_account_password = "P@ssW0rd"
    demo_account_required = True
    notes = ""
    client.version.set_app_review_details(
        version_id=version_id,
        contact_email=contact_email,
        contact_first_name=contact_first_name,
        contact_last_name=contact_last_name,
        contact_phone=contact_phone,
        demo_account_name=demo_account_name,
        demo_account_password=demo_account_password,
        demo_account_required=demo_account_required,
        notes=notes,
    )


def set_idfa(client: asconnect.Client, version_id: str) -> None:
    """Set the advertising ID declaration.

    :param client: The API client
    :param version_id: The version of the app to set the advertising ID declaration for
    """
    client.version.set_idfa(
        version_id=version_id,
        attributes_action_with_previous_ad=True,
        attributes_app_installation_to_previous_ad=True,
        honors_limited_ad_tracking=True,
        serves_ads=False,
    )
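

# The helpers above can be strung together into a single release-preparation flow.
# This is only an illustrative sketch under the same assumptions as the tests in this
# file (the placeholder APP_ID, a hypothetical "1.2.3" version, and placeholder
# metadata paths); it is not part of the asconnect API itself.
def example_prepare_version_for_submission() -> None:
    """Sketch: update metadata, build, review details, and IDFA for one app store version."""
    key_id, key_contents, issuer_id = get_test_data()
    client = asconnect.Client(key_id=key_id, key_contents=key_contents, issuer_id=issuer_id,)

    app = client.app.get_from_bundle_id(APP_ID)
    assert app is not None

    version_string = "1.2.3"
    version = client.version.get_version(app_id=app.identifier, version_string=version_string)
    assert version is not None

    # Push the localized app-level info, then the version-level info, from the metadata checkout
    set_localized_app_info(client, app.identifier)
    set_localized_version_info(client, version.identifier, version_string)

    # Attach the newest matching build and the remaining review information
    set_build(client, app.identifier, version.identifier, version_string)
    set_app_review_details(client, version.identifier)
    set_idfa(client, version.identifier)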