Initial check-in for the SDK generation core project (#9327)
* Copy from the alps
* Remove dependencies on gulp and rush; use npm and tsc instead. Added lint.
* Rename and delete unnecessary files: rename the tool; delete files including .gitignore, readme, and license.
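With gulp and rush removed, the build is driven by plain npm scripts calling tsc and the linter. As a rough, hypothetical sketch only: the pipeline below invokes `npm run test-ci` and `npm run clear-github-test-repos`, so those script names must exist in package.json, but the commands behind them and the `build`/`lint` entries shown here are assumptions, not the project's confirmed configuration.

```jsonc
{
  "scripts": {
    // hypothetical entries; only the script names used by the pipeline are confirmed
    "build": "tsc -p .",
    "lint": "eslint .",
    "test-ci": "<test runner invocation used by the Integration_Test job>",
    "clear-github-test-repos": "<cleanup command invoked by the Clean Up step>"
  }
}
```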
This commit is contained in:
Parent
8fd2675689
Commit
eec88b5cc1
@ -0,0 +1,104 @@
trigger:
- master

pool:
  name: AzurePipelines-EO
  demands:
  - ImageOverride -equals AzurePipelinesUbuntu20.04compliant

variables:
  DOCKER_BUILDKIT: 1

jobs:
- job: Build_Verification
  steps:
  - task: npmAuthenticate@0
    inputs:
      workingFile: .npmrc

  - script: cp .npmrc .npmrc-registry
    displayName: Disable .npmrc-registry

  - script: sed -i -E 's/"version":\s*"([0-9\.]+)",/"version":"\1.$(Build.BuildNumber)",/' package.json
    displayName: Set version in package.json

  - task: Docker@2
    displayName: Build Image
    inputs:
      command: build
      repository: openapi/sdk-automation
      tags: $(Build.BuildNumber)
      Dockerfile: ./Dockerfile
      containerRegistry: openapi_test_docker_connection
      arguments: --build-arg UBUNTU_MIRROR=$(UBUNTU_MIRROR)

  - task: Docker@2
    displayName: Push Image
    condition: notIn(variables['Build.Reason'], 'PullRequest')
    inputs:
      command: push
      repository: openapi/sdk-automation
      containerRegistry: openapi_test_docker_connection
      tags: $(Build.BuildNumber)

  - task: ComponentGovernanceComponentDetection@0
    displayName: Dependencies Compliance Analysis
    inputs:
      verbosity: Verbose
      snapshotForceEnabled: true
      useDefaultDetectors: false
      detectorsToRun: Npm

- job: Integration_Test
  variables:
    SDK_REPO_NAME: PLACEHOLDER
  steps:
  - task: NodeTool@0
    inputs:
      versionSpec: '18.x'

  - task: npmAuthenticate@0
    inputs:
      workingFile: .npmrc

  - script: npm ci
    displayName: NPM Install

  - script: |
      git config --global user.email "sdkautomation@microsoft.com"
      git config --global user.name "SDK Automation"
      rm .npmrc

      npm config set audit false
      sudo npm install -g npm@latest

      sudo apt update
      sudo apt install python3-setuptools python3-venv
      /usr/bin/python3 -m venv work/pyenv
      node ./node_modules/.bin/autorest --typescript

      TEST_RUN_ID=`node -e "console.log(Math.random().toString(36).substring(2, 8))"`
      echo "##vso[task.setvariable variable=TEST_RUN_ID]$TEST_RUN_ID"
      echo "TEST_RUN_ID=$TEST_RUN_ID"
    displayName: Init

  - script: |
      source work/pyenv/bin/activate
      npm run test-ci
    displayName: Integration Test
    timeoutInMinutes: 30
    env:
      SPEC_REPO: $(spec-repo)
      GITHUB_COMMENT_AUTHOR_NAME: $(github-comment-author-name)
      GITHUBAPP_ID: $(githubapp-id)
      GITHUBAPP_PRIVATE_KEY: $(githubapp-private-key)

  - script: |
      [ -z $TEST_RUN_ID ] || npm run clear-github-test-repos
    displayName: Clean Up
    condition: always()
    env:
      SPEC_REPO: $(spec-repo)
      GITHUB_COMMENT_AUTHOR_NAME: $(github-comment-author-name)
      GITHUBAPP_ID: $(githubapp-id)
      GITHUBAPP_PRIVATE_KEY: $(githubapp-private-key)
@ -0,0 +1,10 @@
import globals from "globals";
import tseslint from "typescript-eslint";


export default [
  {files: ["**/*.{js,mjs,cjs,ts}"]},
  {files: ["**/*.js"], languageOptions: {sourceType: "commonjs"}},
  {languageOptions: { globals: globals.node }},
  ...tseslint.configs.recommended,
];
Binary file not shown.
After Width: | Height: | Size: 1.2 KiB
Binary file not shown.
After Width: | Height: | Size: 30 KiB
Binary file not shown.
After Width: | Height: | Size: 1.1 KiB
Binary file not shown.
After Width: | Height: | Size: 1.1 KiB
Binary file not shown.
After Width: | Height: | Size: 1.1 KiB
@ -0,0 +1,46 @@
import { getTestGithubClient, repoOwner } from './utils';
import { sdkAutomationCliConfig } from '../src/cli/config';
import { Octokit, RestEndpointMethodTypes } from '@octokit/rest';

type ReposListForOrgResponse = RestEndpointMethodTypes['repos']['listForOrg']['response']['data']

const cleanGithubTestRepos = async () => {
  const github = await getTestGithubClient();
  const reposRsp: ReposListForOrgResponse = await github.paginate(github.repos.listForOrg.endpoint.merge({
    org: repoOwner
  }));

  const runId = sdkAutomationCliConfig.testRunId;
  const prefixToMatch = runId ? `test-${runId}` : 'test';

  let repos = reposRsp.map(repo => repo.name);
  console.log(`Repos in ${repoOwner}:`);
  console.log(repos.join('\n'));

  console.log(`\nFilter: ${prefixToMatch}`);
  repos = repos.filter(name => name.startsWith(prefixToMatch));
  console.log(`Repos after filter:`);
  console.log(repos.join('\n'));

  const parallelCount = 4;
  const promises: Promise<void>[] = [];
  for (let i = 0; i < parallelCount; ++i) {
    promises.push(cleanReposWorker(github, repos));
  }
  await Promise.all(promises);
};

const cleanReposWorker = async (github: Octokit, repos: string[]) => {
  while (repos.length > 0) {
    const repoName = repos.shift() as string;
    console.log(`Cleaning up ${repoOwner}/${repoName}`);
    try {
      await github.repos.delete({ owner: repoOwner, repo: repoName });
    } catch (e) {
      console.log(`Failed to delete ${repoOwner}/${repoName}: ${e.message} ${e.stack}`);
    }
  }
};

// tslint:disable-next-line: no-floating-promises
cleanGithubTestRepos();
@ -0,0 +1,26 @@
export const fixtures = {
  specTest: {
    name: 'spec-test',
    patch0_AddService: 'spec-test-patch0-add-service',
    patch1_Add_02_01: 'spec-test-patch1-add-02-01', // Depends on patch0
    patch2_Empty: 'spec-test-patch2-empty-update', // Depends on patch0
    patch3_TwoReadme: 'spec-test-patch3-two-readme'
  },
  sdkGo: {
    name: 'sdk-go-test'
  },
  sdkJs: {
    name: 'sdk-js-test',
    patch0_AddServiceGen: 'sdk-js-test-patch0-add-service-gen' // CodeGen result after spec patch0
  },
  sdkPy: {
    name: 'sdk-py-test',
    patch0_Track2: 'sdk-py-test-patch0-track2' // Track2 config
  },
  sdkTf: {
    name: 'sdk-tf-test'
  },
  schmARM: {
    name: 'schm-arm-test'
  }
} as const;
@ -0,0 +1 @@
schemas/code-model-v1
@ -0,0 +1,18 @@
{
  "$schema": "https://openapistorageprod.blob.core.windows.net/sdkautomation/prod/schemas/swagger_to_sdk_config.schema.json",
  "meta": {
    "autorest_options": {
      "use": "@autorest/azureresourceschema@3.0.79",
      "azureresourceschema": "",
      "multiapi": "",
      "sdkrel:azureresourceschema-folder": ".",
      "title": "none",
      "pass-thru:subset-reducer": ""
    },
    "advanced_options": {
      "create_sdk_pull_requests": true,
      "sdk_generation_pull_request_base": "integration_branch"
    },
    "version": "0.2.0"
  }
}
@ -0,0 +1,34 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
# *.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe
*.test
*.prof
*.zip

# Editor swap files
*.swp
*~
.DS_Store
.vscode

# ignore vendor/
vendor/
@ -0,0 +1,427 @@
|
|||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
digest = "1:6b1426cad7057b717351eacf5b6fe70f053f11aac1ce254bbf2fd72c031719eb"
|
||||
name = "contrib.go.opencensus.io/exporter/ocagent"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "dcb33c7f3b7cfe67e8a2cea10207ede1b7c40764"
|
||||
version = "v0.4.12"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:a4431dd9598c9926ff12356c100ed73807815112de703322564f76d60c5294a4"
|
||||
name = "github.com/Azure/go-autorest"
|
||||
packages = [
|
||||
"autorest",
|
||||
"autorest/adal",
|
||||
"autorest/azure",
|
||||
"autorest/azure/auth",
|
||||
"autorest/azure/cli",
|
||||
"autorest/date",
|
||||
"autorest/to",
|
||||
"autorest/validation",
|
||||
"logger",
|
||||
"tracing",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "2913f263500c4a5b23dada1b46ccd22ac972315f"
|
||||
version = "v12.3.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:55388fd080150b9a072912f97b1f5891eb0b50df43401f8b75fb4273d3fec9fc"
|
||||
name = "github.com/Masterminds/semver"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "c7af12943936e8c39859482e61f0574c2fd7fc75"
|
||||
version = "v1.4.2"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:8f5acd4d4462b5136af644d25101f0968a7a94ee90fcb2059cec5b7cc42e0b20"
|
||||
name = "github.com/census-instrumentation/opencensus-proto"
|
||||
packages = [
|
||||
"gen-go/agent/common/v1",
|
||||
"gen-go/agent/metrics/v1",
|
||||
"gen-go/agent/trace/v1",
|
||||
"gen-go/metrics/v1",
|
||||
"gen-go/resource/v1",
|
||||
"gen-go/trace/v1",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "d89fa54de508111353cb0b06403c00569be780d8"
|
||||
version = "v0.2.1"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:76dc72490af7174349349838f2fe118996381b31ea83243812a97e5a0fd5ed55"
|
||||
name = "github.com/dgrijalva/jwt-go"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "06ea1031745cb8b3dab3f6a236daf2b0aa468b7e"
|
||||
version = "v3.2.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:cf0d2e435fd4ce45b789e93ef24b5f08e86be0e9807a16beb3694e2d8c9af965"
|
||||
name = "github.com/dimchansky/utfbom"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "d2133a1ce379ef6fa992b0514a77146c60db9d1c"
|
||||
version = "v1.1.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:052aa482c25b2d76f432c16c4b404a6a3cb9cc205a95818c7680fcec501a43b7"
|
||||
name = "github.com/dnaeon/go-vcr"
|
||||
packages = [
|
||||
"cassette",
|
||||
"recorder",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "ac8906116758e03208f043a52754f16f2982f26d"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:78102ee4d536347316bc42e818340cc50902e45dbd7fdd524c5a1fc0cb07b588"
|
||||
name = "github.com/globalsign/mgo"
|
||||
packages = [
|
||||
".",
|
||||
"bson",
|
||||
"internal/json",
|
||||
"internal/sasl",
|
||||
"internal/scram",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "eeefdecb41b842af6dc652aaea4026e8403e62df"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:b532ee3f683c057e797694b5bfeb3827d89e6adf41c53dbc80e549bca76364ea"
|
||||
name = "github.com/golang/protobuf"
|
||||
packages = [
|
||||
"jsonpb",
|
||||
"proto",
|
||||
"protoc-gen-go/descriptor",
|
||||
"protoc-gen-go/generator",
|
||||
"protoc-gen-go/generator/internal/remap",
|
||||
"protoc-gen-go/plugin",
|
||||
"ptypes",
|
||||
"ptypes/any",
|
||||
"ptypes/duration",
|
||||
"ptypes/struct",
|
||||
"ptypes/timestamp",
|
||||
"ptypes/wrappers",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "6c65a5562fc06764971b7c5d05c76c75e84bdbf7"
|
||||
version = "v1.3.2"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:3b341cd71012c63aacddabfc70b9110be8e30c553349552ad3f77242843f2d03"
|
||||
name = "github.com/grpc-ecosystem/grpc-gateway"
|
||||
packages = [
|
||||
"internal",
|
||||
"runtime",
|
||||
"utilities",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "ad529a448ba494a88058f9e5be0988713174ac86"
|
||||
version = "v1.9.5"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:7fae9ec96d10b2afce0da23c378c8b3389319b7f92fa092f2621bba3078cfb4b"
|
||||
name = "github.com/hashicorp/golang-lru"
|
||||
packages = ["simplelru"]
|
||||
pruneopts = "UT"
|
||||
revision = "7f827b33c0f158ec5dfbba01bb0b14a4541fd81d"
|
||||
version = "v0.5.3"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:870d441fe217b8e689d7949fef6e43efbc787e50f200cb1e70dbca9204a1d6be"
|
||||
name = "github.com/inconshreveable/mousetrap"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75"
|
||||
version = "v1.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:ca955a9cd5b50b0f43d2cc3aeb35c951473eeca41b34eb67507f1dbcc0542394"
|
||||
name = "github.com/kr/pretty"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "73f6ac0b30a98e433b289500d779f50c1a6f0712"
|
||||
version = "v0.1.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:15b5cc79aad436d47019f814fde81a10221c740dc8ddf769221a65097fb6c2e9"
|
||||
name = "github.com/kr/text"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "e2ffdb16a802fe2bb95e2e35ff34f0e53aeef34f"
|
||||
version = "v0.1.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:5d231480e1c64a726869bc4142d270184c419749d34f167646baa21008eb0a79"
|
||||
name = "github.com/mitchellh/go-homedir"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "af06845cf3004701891bf4fdb884bfe4920b3727"
|
||||
version = "v1.1.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:cf31692c14422fa27c83a05292eb5cbe0fb2775972e8f1f8446a71549bd8980b"
|
||||
name = "github.com/pkg/errors"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "ba968bfe8b2f7e042a574c888954fccecfa385b4"
|
||||
version = "v0.8.1"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:274f67cb6fed9588ea2521ecdac05a6d62a8c51c074c1fccc6a49a40ba80e925"
|
||||
name = "github.com/satori/go.uuid"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3"
|
||||
version = "v1.2.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:81e02c4edb639c80559c0650f9401d3e2dcc3256d1fa215382bb7c83c1db9126"
|
||||
name = "github.com/shopspring/decimal"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "cd690d0c9e2447b1ef2a129a6b7b49077da89b8e"
|
||||
version = "1.1.0"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:e096613fb7cf34743d49af87d197663cfccd61876e2219853005a57baedfa562"
|
||||
name = "github.com/spf13/cobra"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "f2b07da1e2c38d5f12845a4f607e2e1018cbb1f5"
|
||||
version = "v0.0.5"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:c1b1102241e7f645bc8e0c22ae352e8f0dc6484b6cb4d132fa9f24174e0119e2"
|
||||
name = "github.com/spf13/pflag"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "298182f68c66c05229eb03ac171abe6e309ee79a"
|
||||
version = "v1.0.3"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:4c93890bbbb5016505e856cb06b5c5a2ff5b7217584d33f2a9071ebef4b5d473"
|
||||
name = "go.opencensus.io"
|
||||
packages = [
|
||||
".",
|
||||
"internal",
|
||||
"internal/tagencoding",
|
||||
"metric/metricdata",
|
||||
"metric/metricproducer",
|
||||
"plugin/ocgrpc",
|
||||
"plugin/ochttp",
|
||||
"plugin/ochttp/propagation/b3",
|
||||
"plugin/ochttp/propagation/tracecontext",
|
||||
"resource",
|
||||
"stats",
|
||||
"stats/internal",
|
||||
"stats/view",
|
||||
"tag",
|
||||
"trace",
|
||||
"trace/internal",
|
||||
"trace/propagation",
|
||||
"trace/tracestate",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "43463a80402d8447b7fce0d2c58edf1687ff0b58"
|
||||
version = "v0.19.3"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:994c4915a59f821705d08ea77b117ec7a3e6a46cc867fd194d887500dac1c3c2"
|
||||
name = "golang.org/x/crypto"
|
||||
packages = [
|
||||
"pkcs12",
|
||||
"pkcs12/internal/rc2",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "4def268fd1a49955bfb3dda92fe3db4f924f2285"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:d2aa096fe6b539afe74dd6ab8e9a160304707f1477b01c637b4bbe20d262a25c"
|
||||
name = "golang.org/x/net"
|
||||
packages = [
|
||||
"context",
|
||||
"http/httpguts",
|
||||
"http2",
|
||||
"http2/hpack",
|
||||
"idna",
|
||||
"internal/timeseries",
|
||||
"trace",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "ca1201d0de80cfde86cb01aea620983605dfe99b"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:382bb5a7fb4034db3b6a2d19e5a4a6bcf52f4750530603c01ca18a172fa3089b"
|
||||
name = "golang.org/x/sync"
|
||||
packages = ["semaphore"]
|
||||
pruneopts = "UT"
|
||||
revision = "112230192c580c3556b8cee6403af37a4fc5f28c"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:ec99dad7924bf972656818f5d62216fb987b7e077d401deb86b3c1e5e1b1d4d6"
|
||||
name = "golang.org/x/sys"
|
||||
packages = ["unix"]
|
||||
pruneopts = "UT"
|
||||
revision = "fc99dfbffb4e5ed5758a37e31dd861afe285406b"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:8d8faad6b12a3a4c819a3f9618cb6ee1fa1cfc33253abeeea8b55336721e3405"
|
||||
name = "golang.org/x/text"
|
||||
packages = [
|
||||
"collate",
|
||||
"collate/build",
|
||||
"internal/colltab",
|
||||
"internal/gen",
|
||||
"internal/language",
|
||||
"internal/language/compact",
|
||||
"internal/tag",
|
||||
"internal/triegen",
|
||||
"internal/ucd",
|
||||
"language",
|
||||
"secure/bidirule",
|
||||
"transform",
|
||||
"unicode/bidi",
|
||||
"unicode/cldr",
|
||||
"unicode/norm",
|
||||
"unicode/rangetable",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "342b2e1fbaa52c93f31447ad2c6abc048c63e475"
|
||||
version = "v0.3.2"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:87f3ab7c6341b1be840c580bdf23dcce036916bf1859e8ab8667033a09ae6097"
|
||||
name = "golang.org/x/tools"
|
||||
packages = [
|
||||
"go/ast/astutil",
|
||||
"go/gcexportdata",
|
||||
"go/internal/gcimporter",
|
||||
"go/internal/packagesdriver",
|
||||
"go/packages",
|
||||
"go/types/typeutil",
|
||||
"imports",
|
||||
"internal/fastwalk",
|
||||
"internal/gopathwalk",
|
||||
"internal/imports",
|
||||
"internal/module",
|
||||
"internal/semver",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "fc6e2057e7f6701ef9b5ef49a089bff4da7f4610"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:5f003878aabe31d7f6b842d4de32b41c46c214bb629bb485387dbcce1edf5643"
|
||||
name = "google.golang.org/api"
|
||||
packages = ["support/bundler"]
|
||||
pruneopts = "UT"
|
||||
revision = "02490b97dff7cfde1995bd77de808fd27053bc87"
|
||||
version = "v0.7.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
digest = "1:3565a93b7692277a5dea355bc47bd6315754f3246ed07a224be6aec28972a805"
|
||||
name = "google.golang.org/genproto"
|
||||
packages = [
|
||||
"googleapis/api/httpbody",
|
||||
"googleapis/rpc/status",
|
||||
"protobuf/field_mask",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "c506a9f9061087022822e8da603a52fc387115a8"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:581c9b0fe9354faf730ff231cf3682089e0b703073cf10e3976219609d27a9ea"
|
||||
name = "google.golang.org/grpc"
|
||||
packages = [
|
||||
".",
|
||||
"balancer",
|
||||
"balancer/base",
|
||||
"balancer/roundrobin",
|
||||
"binarylog/grpc_binarylog_v1",
|
||||
"codes",
|
||||
"connectivity",
|
||||
"credentials",
|
||||
"credentials/internal",
|
||||
"encoding",
|
||||
"encoding/proto",
|
||||
"grpclog",
|
||||
"internal",
|
||||
"internal/backoff",
|
||||
"internal/balancerload",
|
||||
"internal/binarylog",
|
||||
"internal/channelz",
|
||||
"internal/envconfig",
|
||||
"internal/grpcrand",
|
||||
"internal/grpcsync",
|
||||
"internal/syscall",
|
||||
"internal/transport",
|
||||
"keepalive",
|
||||
"metadata",
|
||||
"naming",
|
||||
"peer",
|
||||
"resolver",
|
||||
"resolver/dns",
|
||||
"resolver/passthrough",
|
||||
"serviceconfig",
|
||||
"stats",
|
||||
"status",
|
||||
"tap",
|
||||
]
|
||||
pruneopts = "UT"
|
||||
revision = "045159ad57f3781d409358e3ade910a018c16b30"
|
||||
version = "v1.22.1"
|
||||
|
||||
[[projects]]
|
||||
branch = "v1"
|
||||
digest = "1:af715ae33cc1f5695c4b2a4e4b21d008add8802a99e15bb467ac7c32edb5000d"
|
||||
name = "gopkg.in/check.v1"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "788fd78401277ebd861206a03c884797c6ec5541"
|
||||
|
||||
[[projects]]
|
||||
digest = "1:4d2e5a73dc1500038e504a8d78b986630e3626dc027bc030ba5c75da257cdb96"
|
||||
name = "gopkg.in/yaml.v2"
|
||||
packages = ["."]
|
||||
pruneopts = "UT"
|
||||
revision = "51d6538a90f86fe93ac480b35f37b2be17fef232"
|
||||
version = "v2.2.2"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
input-imports = [
|
||||
"github.com/Azure/go-autorest/autorest",
|
||||
"github.com/Azure/go-autorest/autorest/adal",
|
||||
"github.com/Azure/go-autorest/autorest/azure",
|
||||
"github.com/Azure/go-autorest/autorest/azure/auth",
|
||||
"github.com/Azure/go-autorest/autorest/date",
|
||||
"github.com/Azure/go-autorest/autorest/to",
|
||||
"github.com/Azure/go-autorest/autorest/validation",
|
||||
"github.com/Azure/go-autorest/tracing",
|
||||
"github.com/Masterminds/semver",
|
||||
"github.com/dnaeon/go-vcr/cassette",
|
||||
"github.com/dnaeon/go-vcr/recorder",
|
||||
"github.com/globalsign/mgo",
|
||||
"github.com/pkg/errors",
|
||||
"github.com/satori/go.uuid",
|
||||
"github.com/shopspring/decimal",
|
||||
"github.com/spf13/cobra",
|
||||
"golang.org/x/crypto/pkcs12",
|
||||
"golang.org/x/tools/imports",
|
||||
"gopkg.in/check.v1",
|
||||
]
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
|
@ -0,0 +1,56 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
#   name = "github.com/user/project"
#   version = "1.0.0"
#
# [[constraint]]
#   name = "github.com/user/project2"
#   branch = "dev"
#   source = "github.com/myfork/project2"
#
# [[override]]
#   name = "github.com/x/y"
#   version = "2.4.0"

[prune]
  unused-packages = true
  go-tests = true

[[constraint]]
  name = "github.com/Azure/go-autorest"
  version = "12.3.0"

[[constraint]]
  branch = "master"
  name = "github.com/dnaeon/go-vcr"

[[constraint]]
  branch = "master"
  name = "github.com/globalsign/mgo"

[[constraint]]
  name = "github.com/satori/go.uuid"
  version = "1.2.0"

[[constraint]]
  name = "github.com/shopspring/decimal"
  version = "1.0.0"

[[constraint]]
  branch = "master"
  name = "golang.org/x/crypto"

[[constraint]]
  branch = "master"
  name = "golang.org/x/tools"

[[constraint]]
  branch = "v1"
  name = "gopkg.in/check.v1"
@ -0,0 +1,28 @@
{
  "$schema": "https://openapistorageprod.blob.core.windows.net/sdkautomation/prod/schemas/swagger_to_sdk_config.schema.json",
  "meta": {
    "after_scripts_in_repo": ["gofmt -w ./services/"],
    "autorest_options": {
      "use": "@microsoft.azure/autorest.go@2.1.137",
      "version": "2.0.4407",
      "go": "",
      "verbose": "",
      "sdkrel:go-sdk-folder": ".",
      "multiapi": "",
      "preview-chk": "",
      "go.clear-output-folder": false,
      "stage": "",
      "gomod-root": "github.com/Azure/azure-sdk-for-go"
    },
    "repotag": "azure-sdk-for-go",
    "envs": {
      "sdkrel:GOPATH": "../../../.."
    },
    "advanced_options": {
      "clone_dir": "./src/github.com/Azure/azure-sdk-for-go",
      "sdk_generation_pull_request_base": "integration_branch",
      "create_sdk_pull_requests": true
    },
    "version": "0.2.0"
  }
}
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2020 Microsoft

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -0,0 +1,99 @@
## Azure TestServiceClient SDK for JavaScript

This package contains an isomorphic SDK for TestServiceClient.

### Currently supported environments

- Node.js version 6.x.x or higher
- Browser JavaScript

### How to Install

```bash
npm install @azure/test-service
```

### How to use

#### Node.js - Authentication, client creation, and getting a test, as an example written in TypeScript.

##### Install @azure/ms-rest-nodeauth

- Please install the minimum version of `"@azure/ms-rest-nodeauth": "^3.0.0"`.
```bash
npm install @azure/ms-rest-nodeauth@"^3.0.0"
```

##### Sample code

```typescript
import * as msRest from "@azure/ms-rest-js";
import * as msRestAzure from "@azure/ms-rest-azure-js";
import * as msRestNodeAuth from "@azure/ms-rest-nodeauth";
import { TestServiceClient, TestServiceModels, TestServiceMappers } from "@azure/test-service";
const subscriptionId = process.env["AZURE_SUBSCRIPTION_ID"];

msRestNodeAuth.interactiveLogin().then((creds) => {
  const client = new TestServiceClient(creds, subscriptionId);
  client.test.get().then((result) => {
    console.log("The result is:");
    console.log(result);
  });
}).catch((err) => {
  console.error(err);
});
```

#### Browser - Authentication, client creation, and getting a test, as an example written in JavaScript.

##### Install @azure/ms-rest-browserauth

```bash
npm install @azure/ms-rest-browserauth
```

##### Sample code

See https://github.com/Azure/ms-rest-browserauth to learn how to authenticate to Azure in the browser.

- index.html
```html
<!DOCTYPE html>
<html lang="en">
  <head>
    <title>@azure/test-service sample</title>
    <script src="node_modules/@azure/ms-rest-js/dist/msRest.browser.js"></script>
    <script src="node_modules/@azure/ms-rest-azure-js/dist/msRestAzure.js"></script>
    <script src="node_modules/@azure/ms-rest-browserauth/dist/msAuth.js"></script>
    <script src="node_modules/@azure/test-service/dist/test-service.js"></script>
    <script type="text/javascript">
      const subscriptionId = "<Subscription_Id>";
      const authManager = new msAuth.AuthManager({
        clientId: "<client id for your Azure AD app>",
        tenant: "<optional tenant for your organization>"
      });
      authManager.finalizeLogin().then((res) => {
        if (!res.isLoggedIn) {
          // may cause redirects
          authManager.login();
        }
        const client = new Azure.TestService.TestServiceClient(res.creds, subscriptionId);
        client.test.get().then((result) => {
          console.log("The result is:");
          console.log(result);
        }).catch((err) => {
          console.log("An error occurred:");
          console.error(err);
        });
      });
    </script>
  </head>
  <body></body>
</html>
```

## Related projects

- [Microsoft Azure SDK for JavaScript](https://github.com/Azure/azure-sdk-for-js)

![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js/sdk/README.png)
@ -0,0 +1,57 @@
{
  "name": "@azure/test-service",
  "author": "Microsoft Corporation",
  "description": "TestServiceClient Library with typescript type definitions for node.js and browser.",
  "version": "1.0.0",
  "dependencies": {
    "@azure/ms-rest-azure-js": "^2.1.0",
    "@azure/ms-rest-js": "^2.6.0",
    "tslib": "^2.0.1"
  },
  "keywords": [
    "node",
    "azure",
    "typescript",
    "browser",
    "isomorphic"
  ],
  "license": "MIT",
  "main": "./dist/test-service.js",
  "module": "./esm/testServiceClient.js",
  "types": "./esm/testServiceClient.d.ts",
  "devDependencies": {
    "typescript": "~5.3.3",
    "rollup": "^1.18.0",
    "rollup-plugin-node-resolve": "^5.2.0",
    "rollup-plugin-sourcemaps": "^0.4.2",
    "uglify-js": "^3.6.0"
  },
  "homepage": "https://github.com/Azure/azure-sdk-for-js",
  "repository": {
    "type": "git",
    "url": "https://github.com/Azure/azure-sdk-for-js.git"
  },
  "bugs": {
    "url": "https://github.com/Azure/azure-sdk-for-js/issues"
  },
  "files": [
    "dist/**/*.js",
    "dist/**/*.js.map",
    "dist/**/*.d.ts",
    "dist/**/*.d.ts.map",
    "esm/**/*.js",
    "esm/**/*.js.map",
    "esm/**/*.d.ts",
    "esm/**/*.d.ts.map",
    "src/**/*.ts",
    "README.md",
    "rollup.config.js",
    "tsconfig.json"
  ],
  "scripts": {
    "build": "tsc && rollup -c rollup.config.js && npm run minify",
    "minify": "uglifyjs -c -m --comments --source-map \"content='./dist/test-service.js.map'\" -o ./dist/test-service.min.js ./dist/test-service.js",
    "prepack": "npm run build"
  },
  "sideEffects": false
}
@ -0,0 +1,37 @@
|
|||
import rollup from "rollup";
|
||||
import nodeResolve from "rollup-plugin-node-resolve";
|
||||
import sourcemaps from "rollup-plugin-sourcemaps";
|
||||
|
||||
/**
|
||||
* @type {rollup.RollupFileOptions}
|
||||
*/
|
||||
const config = {
|
||||
input: "./esm/testServiceClient.js",
|
||||
external: [
|
||||
"@azure/ms-rest-js",
|
||||
"@azure/ms-rest-azure-js"
|
||||
],
|
||||
output: {
|
||||
file: "./dist/test-service.js",
|
||||
format: "umd",
|
||||
name: "Azure.TestService",
|
||||
sourcemap: true,
|
||||
globals: {
|
||||
"@azure/ms-rest-js": "msRest",
|
||||
"@azure/ms-rest-azure-js": "msRestAzure"
|
||||
},
|
||||
banner: `/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
||||
*/`
|
||||
},
|
||||
plugins: [
|
||||
nodeResolve({ mainFields: ['module', 'main'] }),
|
||||
sourcemaps()
|
||||
]
|
||||
};
|
||||
|
||||
export default config;
|
|
@ -0,0 +1,50 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
||||
*/
|
||||
|
||||
import { BaseResource, CloudError, AzureServiceClientOptions } from "@azure/ms-rest-azure-js";
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
|
||||
export { BaseResource, CloudError };
|
||||
|
||||
/**
|
||||
* Mocked result.
|
||||
* @summary Test Get.
|
||||
*/
|
||||
export interface TestGetResult {
|
||||
/**
|
||||
* Test result. Test result.
|
||||
*/
|
||||
value?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* An interface representing TestServiceClientOptions.
|
||||
*/
|
||||
export interface TestServiceClientOptions extends AzureServiceClientOptions {
|
||||
baseUri?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Contains response data for the get operation.
|
||||
*/
|
||||
export type TestGetResponse = TestGetResult & {
|
||||
/**
|
||||
* The underlying HTTP response.
|
||||
*/
|
||||
_response: msRest.HttpResponse & {
|
||||
/**
|
||||
* The response body as text (string format)
|
||||
*/
|
||||
bodyAsText: string;
|
||||
|
||||
/**
|
||||
* The response body as parsed JSON or XML
|
||||
*/
|
||||
parsedBody: TestGetResult;
|
||||
};
|
||||
};
|
|
@ -0,0 +1,29 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
||||
*/
|
||||
|
||||
import { CloudErrorMapper, BaseResourceMapper } from "@azure/ms-rest-azure-js";
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
|
||||
export const CloudError = CloudErrorMapper;
|
||||
export const BaseResource = BaseResourceMapper;
|
||||
|
||||
export const TestGetResult: msRest.CompositeMapper = {
|
||||
serializedName: "TestGetResult",
|
||||
type: {
|
||||
name: "Composite",
|
||||
className: "TestGetResult",
|
||||
modelProperties: {
|
||||
value: {
|
||||
serializedName: "value",
|
||||
type: {
|
||||
name: "String"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
|
@ -0,0 +1,32 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for
|
||||
* license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is
|
||||
* regenerated.
|
||||
*/
|
||||
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
|
||||
export const acceptLanguage: msRest.OperationParameter = {
|
||||
parameterPath: "acceptLanguage",
|
||||
mapper: {
|
||||
serializedName: "accept-language",
|
||||
defaultValue: 'en-US',
|
||||
type: {
|
||||
name: "String"
|
||||
}
|
||||
}
|
||||
};
|
||||
export const apiVersion: msRest.OperationQueryParameter = {
|
||||
parameterPath: "apiVersion",
|
||||
mapper: {
|
||||
required: true,
|
||||
serializedName: "api-version",
|
||||
type: {
|
||||
name: "String"
|
||||
}
|
||||
}
|
||||
};
|
|
@ -0,0 +1,12 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
|
||||
*/
|
||||
|
||||
export {
|
||||
CloudError,
|
||||
TestGetResult
|
||||
} from "../models/mappers";
|
|
@ -0,0 +1,11 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for
|
||||
* license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is
|
||||
* regenerated.
|
||||
*/
|
||||
|
||||
export * from "./test";
|
|
@ -0,0 +1,74 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for
|
||||
* license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is
|
||||
* regenerated.
|
||||
*/
|
||||
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
import * as Models from "../models";
|
||||
import * as Mappers from "../models/testMappers";
|
||||
import * as Parameters from "../models/parameters";
|
||||
import { TestServiceClientContext } from "../testServiceClientContext";
|
||||
|
||||
/** Class representing a Test. */
|
||||
export class Test {
|
||||
private readonly client: TestServiceClientContext;
|
||||
|
||||
/**
|
||||
* Create a Test.
|
||||
* @param {TestServiceClientContext} client Reference to the service client.
|
||||
*/
|
||||
constructor(client: TestServiceClientContext) {
|
||||
this.client = client;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get test.
|
||||
* @param [options] The optional parameters
|
||||
* @returns Promise<Models.TestGetResponse>
|
||||
*/
|
||||
get(options?: msRest.RequestOptionsBase): Promise<Models.TestGetResponse>;
|
||||
/**
|
||||
* @param callback The callback
|
||||
*/
|
||||
get(callback: msRest.ServiceCallback<Models.TestGetResult>): void;
|
||||
/**
|
||||
* @param options The optional parameters
|
||||
* @param callback The callback
|
||||
*/
|
||||
get(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.TestGetResult>): void;
|
||||
get(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.TestGetResult>, callback?: msRest.ServiceCallback<Models.TestGetResult>): Promise<Models.TestGetResponse> {
|
||||
return this.client.sendOperationRequest(
|
||||
{
|
||||
options
|
||||
},
|
||||
getOperationSpec,
|
||||
callback) as Promise<Models.TestGetResponse>;
|
||||
}
|
||||
}
|
||||
|
||||
// Operation Specifications
|
||||
const serializer = new msRest.Serializer(Mappers);
|
||||
const getOperationSpec: msRest.OperationSpec = {
|
||||
httpMethod: "GET",
|
||||
path: "providers/Microsoft.TestService/test",
|
||||
queryParameters: [
|
||||
Parameters.apiVersion
|
||||
],
|
||||
headerParameters: [
|
||||
Parameters.acceptLanguage
|
||||
],
|
||||
responses: {
|
||||
200: {
|
||||
bodyMapper: Mappers.TestGetResult
|
||||
},
|
||||
default: {
|
||||
bodyMapper: Mappers.CloudError
|
||||
}
|
||||
},
|
||||
serializer
|
||||
};
|
|
@ -0,0 +1,41 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for
|
||||
* license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is
|
||||
* regenerated.
|
||||
*/
|
||||
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
import * as Models from "./models";
|
||||
import * as Mappers from "./models/mappers";
|
||||
import * as operations from "./operations";
|
||||
import { TestServiceClientContext } from "./testServiceClientContext";
|
||||
|
||||
|
||||
class TestServiceClient extends TestServiceClientContext {
|
||||
// Operation groups
|
||||
test: operations.Test;
|
||||
|
||||
/**
|
||||
* Initializes a new instance of the TestServiceClient class.
|
||||
* @param credentials Credentials needed for the client to connect to Azure.
|
||||
* @param [options] The parameter options
|
||||
*/
|
||||
constructor(credentials: msRest.ServiceClientCredentials, options?: Models.TestServiceClientOptions) {
|
||||
super(credentials, options);
|
||||
this.test = new operations.Test(this);
|
||||
}
|
||||
}
|
||||
|
||||
// Operation Specifications
|
||||
|
||||
export {
|
||||
TestServiceClient,
|
||||
TestServiceClientContext,
|
||||
Models as TestServiceModels,
|
||||
Mappers as TestServiceMappers
|
||||
};
|
||||
export * from "./operations";
|
|
@ -0,0 +1,56 @@
|
|||
/*
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for
|
||||
* license information.
|
||||
*
|
||||
* Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
* Changes may cause incorrect behavior and will be lost if the code is
|
||||
* regenerated.
|
||||
*/
|
||||
|
||||
import * as Models from "./models";
|
||||
import * as msRest from "@azure/ms-rest-js";
|
||||
import * as msRestAzure from "@azure/ms-rest-azure-js";
|
||||
|
||||
const packageName = "@azure/test-service";
|
||||
const packageVersion = "1.0.0";
|
||||
|
||||
export class TestServiceClientContext extends msRestAzure.AzureServiceClient {
|
||||
credentials: msRest.ServiceClientCredentials;
|
||||
apiVersion?: string;
|
||||
|
||||
/**
|
||||
* Initializes a new instance of the TestServiceClient class.
|
||||
* @param credentials Credentials needed for the client to connect to Azure.
|
||||
* @param [options] The parameter options
|
||||
*/
|
||||
constructor(credentials: msRest.ServiceClientCredentials, options?: Models.TestServiceClientOptions) {
|
||||
if (credentials == undefined) {
|
||||
throw new Error('\'credentials\' cannot be null.');
|
||||
}
|
||||
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
if(!options.userAgent) {
|
||||
const defaultUserAgent = msRestAzure.getDefaultUserAgentValue();
|
||||
options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;
|
||||
}
|
||||
|
||||
super(credentials, options);
|
||||
|
||||
this.apiVersion = '2020-01-01';
|
||||
this.acceptLanguage = 'en-US';
|
||||
this.longRunningOperationRetryTimeout = 30;
|
||||
this.baseUri = options.baseUri || this.baseUri || "https://management.azure.com";
|
||||
this.requestContentType = "application/json; charset=utf-8";
|
||||
this.credentials = credentials;
|
||||
|
||||
if(options.acceptLanguage !== null && options.acceptLanguage !== undefined) {
|
||||
this.acceptLanguage = options.acceptLanguage;
|
||||
}
|
||||
if(options.longRunningOperationRetryTimeout !== null && options.longRunningOperationRetryTimeout !== undefined) {
|
||||
this.longRunningOperationRetryTimeout = options.longRunningOperationRetryTimeout;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,19 @@
{
  "compilerOptions": {
    "module": "es6",
    "moduleResolution": "node",
    "strict": true,
    "target": "es5",
    "sourceMap": true,
    "declarationMap": true,
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "forceConsistentCasingInFileNames": true,
    "lib": ["es6", "dom"],
    "declaration": true,
    "outDir": "./esm",
    "importHelpers": true
  },
  "include": ["./src/**/*.ts"],
  "exclude": ["node_modules"]
}
@ -0,0 +1,19 @@
{
  "meta": {
    "autorest_options": {
      "version": "V2",
      "typescript": "",
      "license-header": "MICROSOFT_MIT_NO_VERSION",
      "sdkrel:typescript-sdks-folder": ".",
      "use": "@microsoft.azure/autorest.typescript@4.4.4"
    },
    "advanced_options": {
      "create_sdk_pull_requests": true,
      "sdk_generation_pull_request_base": "integration_branch"
    },
    "version": "0.2.0"
  },
  "advancedOptions": {
    "generationCallMode": "one-per-config"
  }
}
@ -0,0 +1,19 @@
{
  "$schema": "https://openapistorageprod.blob.core.windows.net/sdkautomation/prod/schemas/swagger_to_sdk_config.schema.json",
  "meta": {
    "autorest_options": {
      "use": "@autorest/python@5.3.0",
      "python": "",
      "python-mode": "update",
      "sdkrel:python-sdks-folder": "./sdk/.",
      "multiapi": "",
      "track2": ""
    },
    "advanced_options": {
      "create_sdk_pull_requests": true,
      "sdk_generation_pull_request_base": "integration_branch"
    },
    "repotag": "azure-sdk-for-python",
    "version": "0.2.0"
  }
}
@ -0,0 +1,99 @@
|
|||
# Python cache
|
||||
__pycache__/
|
||||
*.pyc
|
||||
.pytest_cache
|
||||
.mypy_cache
|
||||
.cache
|
||||
|
||||
# Virtual environment
|
||||
env*/
|
||||
|
||||
# PTVS analysis
|
||||
.ptvs/
|
||||
|
||||
# Coverage report
|
||||
**/.coverage
|
||||
_coverage
|
||||
|
||||
# Build results
|
||||
bin/
|
||||
obj/
|
||||
dist/
|
||||
MANIFEST
|
||||
_docs
|
||||
|
||||
# Result of running python setup.py install/pip install -e
|
||||
RECORD.txt
|
||||
build/
|
||||
*.egg-info/
|
||||
.tox_pip_cache*/
|
||||
|
||||
# Test results
|
||||
TestResults/
|
||||
|
||||
# tox generated artifacts
|
||||
test-junit-*.xml
|
||||
pylint-*.out.txt
|
||||
coverage-*.xml
|
||||
stderr.txt
|
||||
stdout.txt
|
||||
|
||||
# tox environment folders
|
||||
.tox/
|
||||
|
||||
# Credentials
|
||||
credentials_real.json
|
||||
testsettings_local.json
|
||||
testsettings_local.cfg
|
||||
servicebus_settings_real.py
|
||||
storage_settings_real.py
|
||||
legacy_mgmt_settings_real.py
|
||||
mgmt_settings_real.py
|
||||
app_creds_real.py
|
||||
|
||||
# User-specific files
|
||||
*.suo
|
||||
*.user
|
||||
*.sln.docstates
|
||||
.vs/
|
||||
.vscode/
|
||||
|
||||
# Windows image file caches
|
||||
Thumbs.db
|
||||
ehthumbs.db
|
||||
|
||||
# Folder config file
|
||||
Desktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Mac desktop service store files
|
||||
.DS_Store
|
||||
|
||||
.idea
|
||||
src/build
|
||||
*.iml
|
||||
/doc/_build
|
||||
/doc/sphinx/examples
|
||||
/doc/sphinx/tests
|
||||
/.vs/config/applicationhost.config
|
||||
|
||||
# Azure deployment credentials
|
||||
*.pubxml
|
||||
|
||||
# [begoldsm] ignore virtual env if it exists.
|
||||
adlEnv/
|
||||
|
||||
code_reports
|
||||
|
||||
# Azure Storage test credentials
|
||||
sdk/storage/azure-storage-blob/tests/settings_real.py
|
||||
sdk/storage/azure-storage-queue/tests/settings_real.py
|
||||
sdk/storage/azure-storage-file-share/tests/settings_real.py
|
||||
sdk/storage/azure-storage-file-datalake/tests/settings_real.py
|
||||
*.code-workspace
|
||||
sdk/cosmos/azure-cosmos/test/test_config.py
|
||||
|
||||
# temp path to to run regression test
|
||||
.tmp_code_path/
|
|
@ -0,0 +1,33 @@
#!/usr/bin/env python

#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------

import argparse
import os
import glob
from subprocess import check_call


DEFAULT_DEST_FOLDER = "./dist"

def create_package(name, dest_folder=DEFAULT_DEST_FOLDER):
    # a package will exist in either one, or the other folder. this is why we can resolve both at the same time.
    absdirs = [os.path.dirname(package) for package in (glob.glob('{}/setup.py'.format(name)) + glob.glob('sdk/*/{}/setup.py'.format(name)))]

    absdirpath = os.path.abspath(absdirs[0])
    check_call(['python', 'setup.py', 'bdist_wheel', '-d', dest_folder], cwd=absdirpath)
    check_call(['python', 'setup.py', "sdist", "--format", "zip", '-d', dest_folder], cwd=absdirpath)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Build Azure package.')
    parser.add_argument('name', help='The package name')
    parser.add_argument('--dest', '-d', default=DEFAULT_DEST_FOLDER,
                        help='Destination folder. Relative to the package dir. [default: %(default)s]')

    args = parser.parse_args()
    create_package(args.name, args.dest)
@ -0,0 +1,417 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import print_function, unicode_literals
|
||||
import argparse
|
||||
import ast
|
||||
from datetime import datetime
|
||||
import glob
|
||||
import io
|
||||
import json
|
||||
import os
|
||||
from pkg_resources import Requirement
|
||||
import re
|
||||
import sys
|
||||
import textwrap
|
||||
|
||||
# Todo: This should use a common omit logic once ci scripts are refactored into ci_tools
|
||||
skip_pkgs = [
|
||||
'azure-mgmt-documentdb', # deprecated
|
||||
'azure-sdk-for-python', # top-level package
|
||||
'azure-sdk-tools', # internal tooling for automation
|
||||
'azure-servicemanagement-legacy', # legacy (not officially deprecated)
|
||||
'azure-common',
|
||||
'azure',
|
||||
'azure-keyvault'
|
||||
]
|
||||
|
||||
def report_should_skip_lib(lib_name):
|
||||
return lib_name in skip_pkgs or lib_name.endswith('-nspkg')
|
||||
|
||||
def dump_should_skip_lib(lib_name):
|
||||
return report_should_skip_lib(lib_name) or '-mgmt' in lib_name or not lib_name.startswith('azure')
|
||||
|
||||
def locate_libs(base_dir):
|
||||
packages = [os.path.dirname(p) for p in (glob.glob(os.path.join(base_dir, 'azure*', 'setup.py')) + glob.glob(os.path.join(base_dir, 'sdk/*/azure*', 'setup.py')))]
|
||||
return sorted(packages)
|
||||
|
||||
def locate_wheels(base_dir):
|
||||
wheels = glob.glob(os.path.join(base_dir, '*.whl'))
|
||||
return sorted(wheels)
|
||||
|
||||
def parse_req(req):
|
||||
try:
|
||||
req_object = Requirement.parse(req)
|
||||
req_name = req_object.key
|
||||
spec = str(req_object).replace(req_name, '')
|
||||
return (req_name, spec)
|
||||
except:
|
||||
print('Failed to parse requirement %s' % (req))
|
||||
|
||||
def record_dep(dependencies, req_name, spec, lib_name):
|
||||
if not req_name in dependencies:
|
||||
dependencies[req_name] = {}
|
||||
if not spec in dependencies[req_name]:
|
||||
dependencies[req_name][spec] = []
|
||||
dependencies[req_name][spec].append(lib_name)
|
||||
|
||||
|
||||
def get_lib_deps(base_dir):
|
||||
packages = {}
|
||||
dependencies = {}
|
||||
for lib_dir in locate_libs(base_dir):
|
||||
try:
|
||||
setup_path = os.path.join(lib_dir, 'setup.py')
|
||||
lib_name, version, requires = parse_setup(setup_path)
|
||||
|
||||
packages[lib_name] = {
|
||||
'version': version,
|
||||
'source': lib_dir,
|
||||
'deps': []
|
||||
}
|
||||
|
||||
for req in requires:
|
||||
req_name, spec = parse_req(req)
|
||||
packages[lib_name]['deps'].append({
|
||||
'name': req_name,
|
||||
'version': spec
|
||||
})
|
||||
if not report_should_skip_lib(lib_name):
|
||||
record_dep(dependencies, req_name, spec, lib_name)
|
||||
except:
|
||||
print('Failed to parse %s' % (setup_path))
|
||||
return packages, dependencies
|
||||
|
||||
def get_wheel_deps(wheel_dir):
|
||||
from wheel.pkginfo import read_pkg_info_bytes
|
||||
from wheel.wheelfile import WheelFile
|
||||
|
||||
packages = {}
|
||||
dependencies = {}
|
||||
for whl_path in locate_wheels(wheel_dir):
|
||||
try:
|
||||
with WheelFile(whl_path) as whl:
|
||||
pkg_info = read_pkg_info_bytes(whl.read(whl.dist_info_path + '/METADATA'))
|
||||
lib_name = pkg_info.get('Name')
|
||||
|
||||
packages[lib_name] = {
|
||||
'version': pkg_info.get('Version'),
|
||||
'source': whl_path,
|
||||
'deps': []
|
||||
}
|
||||
|
||||
requires = pkg_info.get_all('Requires-Dist')
|
||||
for req in requires:
|
||||
req = req.split(';')[0] # Extras conditions appear after a semicolon
|
||||
req = re.sub(r'[\s\(\)]', '', req) # Version specifiers appear in parentheses
|
||||
req_name, spec = parse_req(req)
|
||||
packages[lib_name]['deps'].append({
|
||||
'name': req_name,
|
||||
'version': spec
|
||||
})
|
||||
if not report_should_skip_lib(lib_name):
|
||||
record_dep(dependencies, req_name, spec, lib_name)
|
||||
except:
|
||||
print('Failed to parse METADATA from %s' % (whl_path))
|
||||
return packages, dependencies
|
||||
|
||||
def parse_setup(setup_filename):
|
||||
mock_setup = textwrap.dedent('''\
|
||||
def setup(*args, **kwargs):
|
||||
__setup_calls__.append((args, kwargs))
|
||||
''')
|
||||
parsed_mock_setup = ast.parse(mock_setup, filename=setup_filename)
|
||||
with io.open(setup_filename, 'r', encoding='utf-8-sig') as setup_file:
|
||||
parsed = ast.parse(setup_file.read())
|
||||
for index, node in enumerate(parsed.body[:]):
|
||||
if (
|
||||
not isinstance(node, ast.Expr) or
|
||||
not isinstance(node.value, ast.Call) or
|
||||
not hasattr(node.value.func, 'id') or
|
||||
node.value.func.id != 'setup'
|
||||
):
|
||||
continue
|
||||
parsed.body[index:index] = parsed_mock_setup.body
|
||||
break
|
||||
|
||||
fixed = ast.fix_missing_locations(parsed)
|
||||
codeobj = compile(fixed, setup_filename, 'exec')
|
||||
local_vars = {}
|
||||
global_vars = {'__setup_calls__': []}
|
||||
current_dir = os.getcwd()
|
||||
working_dir = os.path.dirname(setup_filename)
|
||||
os.chdir(working_dir)
|
||||
exec(codeobj, global_vars, local_vars)
|
||||
os.chdir(current_dir)
|
||||
_, kwargs = global_vars['__setup_calls__'][0]
|
||||
|
||||
version = kwargs['version']
|
||||
name = kwargs['name']
|
||||
requires = []
|
||||
if 'install_requires' in kwargs:
|
||||
requires += kwargs['install_requires']
|
||||
if 'extras_require' in kwargs:
|
||||
for extra in kwargs['extras_require'].values():
|
||||
requires += extra
|
||||
return name, version, requires
|
||||
|
||||
def dict_compare(d1, d2):
|
||||
d1_keys = set(d1.keys())
|
||||
d2_keys = set(d2.keys())
|
||||
intersect_keys = d1_keys.intersection(d2_keys)
|
||||
added = d1_keys - d2_keys
|
||||
removed = d2_keys - d1_keys
|
||||
modified = {o : (d1[o], d2[o]) for o in intersect_keys if d1[o] != d2[o]}
|
||||
return added, removed, modified
|
||||
|
||||
def render_report(output_path, report_context):
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(os.path.dirname(os.path.realpath(__file__)))
|
||||
)
|
||||
template = env.get_template('deps.html.j2')
|
||||
with io.open(output_path, 'w', encoding='utf-8') as output:
|
||||
output.write(template.render(report_context))
|
||||
|
||||
def get_dependent_packages(data_pkgs):
|
||||
# Get unique set of Azure SDK packages that are added as required package
|
||||
deps = []
|
||||
for v in data_pkgs.values():
|
||||
deps.extend([dep['name'] for dep in v['deps'] if not dump_should_skip_lib(dep['name'])])
|
||||
return set(deps)
|
||||
|
||||
def dump_packages(data_pkgs):
|
||||
dump_data = {}
|
||||
unique_dependent_packages = get_dependent_packages(data_pkgs)
|
||||
for p_name, p_data in data_pkgs.items():
|
||||
p_id = p_name + ':' + p_data['version']
|
||||
dep = [p for p in p_data['deps'] if not dump_should_skip_lib(p['name'])]
|
||||
# Add package if it requires other azure sdk package or if it is added as required by other sdk package
|
||||
if len(dep) > 0 or p_name in unique_dependent_packages:
|
||||
dump_data[p_id] = {
|
||||
'name': p_name,
|
||||
'version': p_data['version'],
|
||||
'type': 'internal',
|
||||
'deps': dep
|
||||
}
|
||||
|
||||
return dump_data
|
||||
|
||||
def resolve_lib_deps(dump_data, data_pkgs, pkg_id):
|
||||
for dep in dump_data[pkg_id]['deps']:
|
||||
dep_req = Requirement.parse(dep['name'] + dep['version'])
|
||||
if dep['name'] in data_pkgs and data_pkgs[dep['name']]['version'] in dep_req:
|
||||
# If the internal package version matches the dependency spec,
|
||||
# rewrite the dep version to match the internal package version
|
||||
dep['version'] = data_pkgs[dep['name']]['version']
|
||||
else:
|
||||
dep_id = dep['name'] + ':' + dep['version']
|
||||
if not dep_id in dump_data:
|
||||
dump_data[dep_id] = {
|
||||
'name': dep['name'],
|
||||
'version': dep['version'],
|
||||
'type': 'internalbinary' if dep['name'] in data_pkgs else 'external',
|
||||
'deps': []
|
||||
}
|
||||
|
||||
if __name__ == '__main__':
|
||||
base_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
||||
|
||||
parser = argparse.ArgumentParser(description='''\
|
||||
Analyze dependencies in Python packages. First, all declared dependencies
|
||||
and the libraries that declare them will be discovered (visible with
|
||||
--verbose). Next, all declared dependency version specs will be analyzed to
|
||||
ensure they are consistent across all libraries. Finally, all declared
|
||||
dependency version specs will be compared to the frozen version specs in
|
||||
shared_requirements.txt, or if --freeze is provided, all declared dependency
|
||||
version specs will be frozen to shared_requirements.txt.
|
||||
''')
|
||||
parser.add_argument('--verbose', help='verbose output', action='store_true')
|
||||
parser.add_argument('--freeze', help='freeze dependencies after analyzing (otherwise, validate dependencies against frozen list)', action='store_true')
|
||||
parser.add_argument('--out', metavar='FILE', help='write HTML-formatted report to FILE')
|
||||
parser.add_argument('--dump', metavar='FILE', help='write JSONP-formatted dependency data to FILE')
|
||||
parser.add_argument('--wheeldir', metavar='DIR', help='analyze wheels in DIR rather than source packages in this repository')
|
||||
args = parser.parse_args()
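# Illustrative invocations, assuming this script is saved as analyze_deps.py:
#   python analyze_deps.py --verbose --out deps.html
#   python analyze_deps.py --freeze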
|
||||
|
||||
if args.out:
|
||||
try:
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
except ImportError:
|
||||
print("Jinja2 is required to render the dependency report. Please install with 'pip install Jinja2' to use this option.")
|
||||
sys.exit(1)
|
||||
|
||||
if args.wheeldir:
|
||||
all_packages, dependencies = get_wheel_deps(args.wheeldir)
|
||||
else:
|
||||
all_packages, dependencies = get_lib_deps(base_dir)
|
||||
|
||||
packages = {k: v for k,v in all_packages.items() if not report_should_skip_lib(k)}
|
||||
|
||||
if args.verbose:
|
||||
print('Packages analyzed')
|
||||
print('=================')
|
||||
for package in sorted(packages.keys()):
|
||||
info = packages[package]
|
||||
print("%s %s" % (package, info['version']))
|
||||
print(" from %s" % (info['source']))
|
||||
|
||||
print('\n\nRequirements discovered')
|
||||
print('=======================')
|
||||
for requirement in sorted(dependencies.keys()):
|
||||
specs = dependencies[requirement]
|
||||
libs = []
|
||||
print('%s' % (requirement))
|
||||
for spec in specs.keys():
|
||||
print('%s' % (spec if spec else '(empty)'))
|
||||
for lib in specs[spec]:
|
||||
print(' * %s' % (lib))
|
||||
print('')
|
||||
|
||||
inconsistent = []
|
||||
for requirement in sorted(dependencies.keys()):
|
||||
specs = dependencies[requirement]
|
||||
num_specs = len(specs)
|
||||
if num_specs == 1:
|
||||
continue
|
||||
|
||||
if not inconsistent and args.verbose:
|
||||
print('\nInconsistencies detected')
|
||||
print('========================')
|
||||
|
||||
inconsistent.append(requirement)
|
||||
if args.verbose:
|
||||
print("Requirement '%s' has %s unique specifiers:" % (requirement, num_specs))
|
||||
for spec in sorted(specs.keys()):
|
||||
libs = specs[spec]
|
||||
friendly_spec = '(none)' if spec == '' else spec
|
||||
print(" '%s'" % (friendly_spec))
|
||||
print(' ' + ('-' * (len(friendly_spec) + 2)))
|
||||
for lib in sorted(libs):
|
||||
print(' * %s' % (lib))
|
||||
print('')
|
||||
|
||||
frozen_filename = os.path.join(base_dir, 'shared_requirements.txt')
|
||||
if args.freeze:
|
||||
if inconsistent:
|
||||
print('Unable to freeze requirements due to incompatible dependency versions')
|
||||
sys.exit(1)
|
||||
else:
|
||||
with io.open(frozen_filename, 'w', encoding='utf-8') as frozen_file:
|
||||
for requirement in sorted(dependencies.keys()):
|
||||
spec = list(dependencies[requirement].keys())[0]
|
||||
if spec == '':
|
||||
print("Requirement '%s' being frozen with no version spec" % requirement)
|
||||
frozen_file.write(requirement + spec + '\n')
|
||||
print('Current requirements frozen to %s' % (frozen_filename))
|
||||
sys.exit(0)
|
||||
|
||||
frozen = {}
|
||||
overrides = {}
|
||||
override_count = 0
|
||||
try:
|
||||
with io.open(frozen_filename, 'r', encoding='utf-8-sig') as frozen_file:
|
||||
for line in frozen_file:
|
||||
if line.startswith('#override'):
|
||||
_, lib_name, req_override = line.split(' ', 2)
|
||||
req_override_name, override_spec = parse_req(req_override)
|
||||
record_dep(overrides, req_override_name, override_spec, lib_name)
|
||||
override_count += 1
|
||||
elif not line.startswith('#'):
|
||||
req_name, spec = parse_req(line)
|
||||
frozen[req_name] = [spec]
|
||||
except:
|
||||
print('Unable to open shared_requirements.txt, shared requirements have not been validated')
|
||||
|
||||
missing_reqs, new_reqs, changed_reqs = {}, {}, {}
|
||||
non_overridden_reqs_count = 0
|
||||
exitcode = 0
|
||||
if frozen:
|
||||
flat_deps = {req: sorted(dependencies[req].keys()) for req in dependencies}
|
||||
missing_reqs, new_reqs, changed_reqs = dict_compare(frozen, flat_deps)
|
||||
if args.verbose and len(overrides) > 0:
|
||||
print('\nThe following requirement overrides are in place:')
|
||||
for overridden_req in overrides:
|
||||
for spec in overrides[overridden_req]:
|
||||
libs = ', '.join(sorted(overrides[overridden_req][spec]))
|
||||
print(' * %s is allowed for %s' % (overridden_req + spec, libs))
|
||||
if args.verbose and len(missing_reqs) > 0:
|
||||
print('\nThe following requirements are frozen but do not exist in any current library:')
|
||||
for missing_req in missing_reqs:
|
||||
[spec] = frozen[missing_req]
|
||||
print(' * %s' % (missing_req + spec))
|
||||
if len(new_reqs) > 0:
|
||||
exitcode = 1
|
||||
if args.verbose:
|
||||
for new_req in new_reqs:
|
||||
for spec in dependencies[new_req]:
|
||||
libs = dependencies[new_req][spec]
|
||||
print("\nRequirement '%s' is declared in the following libraries but has not been frozen:" % (new_req + spec))
|
||||
for lib in libs:
|
||||
print(" * %s" % (lib))
|
||||
if len(changed_reqs) > 0:
|
||||
for changed_req in changed_reqs:
|
||||
frozen_specs, current_specs = changed_reqs[changed_req]
|
||||
unmatched_specs = set(current_specs) - set(frozen_specs)
|
||||
override_specs = overrides.get(changed_req, [])
|
||||
|
||||
for spec in unmatched_specs:
|
||||
if spec in override_specs:
|
||||
non_overridden_libs = set(dependencies[changed_req][spec]) - set(override_specs[spec])
|
||||
else:
|
||||
non_overridden_libs = dependencies[changed_req][spec]
|
||||
|
||||
if len(non_overridden_libs) > 0:
|
||||
exitcode = 1
|
||||
non_overridden_reqs_count += 1
|
||||
if args.verbose:
|
||||
print("\nThe following libraries declare requirement '%s' which does not match the frozen requirement '%s':" % (changed_req + spec, changed_req + frozen_specs[0]))
|
||||
for lib in non_overridden_libs:
|
||||
print(" * %s" % (lib))
|
||||
if exitcode == 0:
|
||||
if args.verbose:
|
||||
print('')
|
||||
print('All library dependencies validated against frozen requirements')
|
||||
elif not args.verbose:
|
||||
print('Library dependencies do not match frozen requirements, run this script with --verbose for details')
|
||||
elif inconsistent:
|
||||
exitcode = 1
|
||||
|
||||
if exitcode == 1:
|
||||
if not args.verbose:
|
||||
print('\nIncompatible dependency versions detected in libraries, run this script with --verbose for details')
|
||||
else:
|
||||
print('\nAll library dependencies verified, no incompatible versions detected')
|
||||
|
||||
if args.out:
|
||||
external = [k for k in dependencies if k not in packages and not report_should_skip_lib(k)]
|
||||
def display_order(k):
|
||||
if k in inconsistent:
|
||||
return 'a' + k if k in external else 'b' + k
|
||||
else:
|
||||
return 'c' + k if k in external else 'd' + k
|
||||
|
||||
render_report(args.out, {
|
||||
'changed_reqs': changed_reqs,
|
||||
'curtime': datetime.utcnow(),
|
||||
'dependencies': dependencies,
|
||||
'env': os.environ,
|
||||
'external': external,
|
||||
'frozen': frozen,
|
||||
'inconsistent': inconsistent,
|
||||
'missing_reqs': missing_reqs,
|
||||
'new_reqs': new_reqs,
|
||||
'non_overridden_reqs_count': non_overridden_reqs_count,
|
||||
'ordered_deps': sorted(dependencies.keys(), key=display_order),
|
||||
'override_count': override_count,
|
||||
'overrides': overrides,
|
||||
'packages': packages,
|
||||
'repo_name': 'azure-sdk-for-python'
|
||||
})
|
||||
|
||||
if args.dump:
|
||||
data_pkgs = {k: v for k, v in all_packages.items() if not dump_should_skip_lib(k)}
|
||||
dump_data = dump_packages(data_pkgs)
|
||||
pkg_ids = [k for k in dump_data.keys()]
|
||||
for pkg_id in pkg_ids:
|
||||
resolve_lib_deps(dump_data, data_pkgs, pkg_id)
|
||||
with io.open(args.dump, 'w', encoding='utf-8') as dump_file:
|
||||
dump_file.write('const data = ' + json.dumps(dump_data) + ';')
|
||||
|
||||
sys.exit(exitcode)
|
|
@ -0,0 +1,284 @@
|
|||
{% set branch = env.get('SYSTEM_PULLREQUEST_SOURCEBRANCH') if env.get('SYSTEM_PULLREQUEST_SOURCEBRANCH') else env.get('BUILD_SOURCEBRANCHNAME') %}
|
||||
{% set build = env.get('BUILD_BUILDNUMBER') %}
|
||||
{% set build_url = '%s%s/_build/results?buildId=%s' % (env.get('SYSTEM_TEAMFOUNDATIONCOLLECTIONURI'), env.get('SYSTEM_TEAMPROJECT'), env.get('BUILD_BUILDID')) %}
|
||||
{% set commit = env.get('BUILD_SOURCEVERSION') %}
|
||||
{% set isfork = env.get('SYSTEM_PULLREQUEST_ISFORK') == 'True' %}
|
||||
{% set rel_url = env.get('RELEASE_RELEASEWEBURL') %}
|
||||
{% set release = env.get('RELEASE_RELEASENAME') %}
|
||||
{% set repo = env.get('BUILD_REPOSITORY_NAME') if isfork else ('Azure/' + repo_name) %}
|
||||
{% macro pluralize(num, singular, plural) -%}
|
||||
{% if num == 1 %}{{ singular }}{% else %}{{ plural }}{% endif %}
|
||||
{%- endmacro %}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>{{ repo_name|capitalize }} Dependency Report</title>
|
||||
<meta charset="UTF-8"/>
|
||||
<style>
|
||||
body {
|
||||
font-family: Verdana, sans-serif;
|
||||
font-size: 14px;
|
||||
text-size-adjust: none;
|
||||
}
|
||||
table {
|
||||
border-spacing: 0px;
|
||||
width: 65%;
|
||||
font-size: 14px;
|
||||
}
|
||||
table.condensed tr td {
|
||||
padding: 7px 15px;
|
||||
}
|
||||
th, td {
|
||||
padding: 15px;
|
||||
border-bottom: 1px solid #ddd;
|
||||
vertical-align:top;
|
||||
}
|
||||
tr:nth-child(even) {
|
||||
background-color: #f2f2f2;
|
||||
}
|
||||
th {
|
||||
background-color: #2E7CAF;
|
||||
color: white;
|
||||
font-weight: 300;
|
||||
text-align: left;
|
||||
}
|
||||
th a {
|
||||
color: white;
|
||||
}
|
||||
th.inconsistent {
|
||||
background-color: #FF0000;
|
||||
}
|
||||
td.version {
|
||||
width: 75px;
|
||||
}
|
||||
.tooltip {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
border-bottom: 1px dotted black;
|
||||
}
|
||||
.tooltip .tooltiptext {
|
||||
visibility: hidden;
|
||||
background-color: black;
|
||||
color: #fff;
|
||||
white-space: nowrap;
|
||||
text-align: left;
|
||||
padding: 5px;
|
||||
font-size: 14px;
|
||||
position: absolute;
|
||||
z-index: 1;
|
||||
margin-top: 7px;
|
||||
top: 100%;
|
||||
left: 0%;
|
||||
}
|
||||
.tooltip .tooltiptext::after {
|
||||
content: " ";
|
||||
position: absolute;
|
||||
bottom: 100%; /* At the top of the tooltip */
|
||||
left: 5%;
|
||||
margin-left: -5px;
|
||||
border-width: 5px;
|
||||
border-style: solid;
|
||||
border-color: transparent transparent black transparent;
|
||||
}
|
||||
.tooltip:hover .tooltiptext {
|
||||
visibility: visible;
|
||||
}
|
||||
|
||||
.success {
|
||||
color: #00CC00;
|
||||
}
|
||||
.fail {
|
||||
color: #CC0000;
|
||||
}
|
||||
|
||||
.dep_type {
|
||||
border: 1px solid gray;
|
||||
border-radius: 2px;
|
||||
background: lightgray;
|
||||
font-size: 10px;
|
||||
padding: 1px 2px;
|
||||
}
|
||||
|
||||
@media only screen and (max-width: 1350px) {
|
||||
body, table {
|
||||
font-size: 25px;
|
||||
}
|
||||
table {
|
||||
width: 95%;
|
||||
}
|
||||
td.version {
|
||||
width: 35px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<center>
|
||||
<h1>{{ repo_name|capitalize }} Dependency Report</h1>
|
||||
<h3>
|
||||
Generated at {{ curtime.replace(microsecond=0).isoformat() }}Z
|
||||
{% if release %}
|
||||
for release <a href="{{ rel_url }}">{{ release }}</a>
|
||||
{% elif build %}
|
||||
for build <a href="{{ build_url }}">{{ build }}</a>
|
||||
{% if branch %}
|
||||
<br/>from branch <a href="https://github.com/{{ repo }}/tree/{{ branch }}">{{ branch }}</a>
|
||||
{% if isfork %}
|
||||
in repo <a href="https://github.com/{{ repo }}">{{ repo }}</a>
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% if commit %}
|
||||
(<a href="https://github.com/{{ repo }}/commit/{{ commit }}">{{ commit[:7] }}</a>)
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
</h3>
|
||||
<p>
|
||||
{{ dependencies|length - external|length }} internal and {{ external|length }} external package <a href="#dependencies">{{ pluralize(dependencies|length,'dependency</a> was','dependencies</a> were') }} analyzed to determine if any packages declare inconsistent dependency versions.<br/>
|
||||
{% if inconsistent %}
|
||||
<strong>{{ inconsistent|length }} inconsistent package dependency {{ pluralize(inconsistent|length,'version was','versions were') }} discovered.</strong><br/><br/>
|
||||
{% else %}
|
||||
No inconsistent package dependency versions were discovered.<br/><br/>
|
||||
{% endif %}
|
||||
{% if frozen %}
|
||||
{{ frozen|length }} dependency {{ pluralize(frozen|length,'version was','versions were') }} discovered in the <a href="#lockfile">lockfile</a>.<br/>
|
||||
{% if override_count %}
|
||||
<strong>{{ override_count }} dependency version {{ pluralize(override_count,'override is','overrides are') }} present, causing dependency versions to differ from the version in the lockfile.</strong><br/>
|
||||
{% endif %}
|
||||
{% if new_reqs %}
|
||||
<strong>{{ new_reqs|length }} {{ pluralize(new_reqs|length,'dependency is','dependencies are') }} missing from the lockfile.</strong><br/>
|
||||
{% endif %}
|
||||
{% if non_overridden_reqs_count %}
|
||||
<strong>{{ non_overridden_reqs_count }} dependency {{ pluralize(non_overridden_reqs_count,'version does','versions do') }} not match the version in the lockfile.</strong><br/>
|
||||
{% endif %}
|
||||
{% if not new_reqs and not non_overridden_reqs_count %}
|
||||
All declared dependency versions were validated against those in the lockfile.<br/>
|
||||
{% endif %}
|
||||
{% else %}
|
||||
<strong>No lockfile is present in the repo, declared dependency versions were not able to be validated.</strong><br/>
|
||||
{% endif %}
|
||||
<br/>This report scanned {{ packages|length }} <a href="#packages">{{ pluralize(packages|length,'package','packages') }}</a>.
|
||||
</p>
|
||||
<a name="dependencies"/>
|
||||
{% for dep_name in ordered_deps %}
|
||||
<a name="dep_{{ dep_name }}"/>
|
||||
<table>
|
||||
<thead>
|
||||
{% if loop.index == 1 %}
|
||||
<tr><th colspan="2"><strong>Dependencies Discovered in Packages</strong></th></tr>
|
||||
{% endif %}
|
||||
<tr>
|
||||
{% if dep_name in external %}
|
||||
{% set dep_type = "external" %}
|
||||
{% else %}
|
||||
{% set dep_type = "internal" %}
|
||||
{% endif %}
|
||||
{% if dep_name in inconsistent %}
|
||||
{% set dep_type = "inconsistent " + dep_type %}
|
||||
{% endif %}
|
||||
<th colspan="2" class="{{ dep_type }}"><strong>{{ dep_type|title }} Dependency:</strong> {{ dep_name }}</th></tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for requirement, packages in dependencies[dep_name]|dictsort %}
|
||||
<tr>
|
||||
<td class="version">{{ requirement if requirement else '(empty)' }}</td>
|
||||
<td>
|
||||
{% for package_name in packages|sort %}
|
||||
{{ package_name }}
|
||||
{% if dep_name in overrides and requirement in overrides[dep_name] and package_name in overrides[dep_name][requirement] %}
|
||||
<span class="dep_type">override</span>
|
||||
{% endif %}
|
||||
<br/>
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<br/>
|
||||
{% endfor %}
|
||||
<br/><br/><hr/><br/><br/>
|
||||
<a name="lockfile"/>
|
||||
<table class="condensed">
|
||||
<thead>
|
||||
<tr><th colspan="3"><strong>Dependencies Frozen in Lockfile</strong></th></tr>
|
||||
{% if frozen %}
|
||||
<tr><th>Dependency</th><th>Frozen Version</th><th>Dependency State</th></tr>
|
||||
{% endif %}
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for name, versions in frozen|dictsort %}
|
||||
<tr>
|
||||
<td><a href="#dep_{{ name }}">{{ name }}</a></td>
|
||||
<td>{{ versions[0] if versions[0] else '(empty)' }}</td>
|
||||
{% if name in missing_reqs %}
|
||||
<td>⚠️ No packages reference this dependency</td>
|
||||
{% elif name in changed_reqs %}
|
||||
<td>
|
||||
<div class="tooltip">❌ One or more packages reference a different version of this dependency
|
||||
<span class="tooltiptext">
|
||||
{% for spec, libs in dependencies[name]|dictsort %}
|
||||
{% if spec != versions[0] %}
|
||||
{% for package in libs|sort %}
|
||||
{{ package }} ({{ spec }})
|
||||
<br/>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
</span>
|
||||
</div>
|
||||
</td>
|
||||
{% else %}
|
||||
<td>✅ All packages validated against this dependency and version</td>
|
||||
{% endif %}
|
||||
</tr>
|
||||
{% endfor %}
|
||||
{% if not frozen %}
|
||||
<tr><td colspan="2">Unable to open shared_requirements.txt, shared requirements have not been validated</td></tr>
|
||||
{% endif %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% if frozen and new_reqs %}
|
||||
<br/>
|
||||
{% for name in new_reqs|sort %}
|
||||
<table class="condensed">
|
||||
<thead>
|
||||
{% if loop.index == 1 %}
|
||||
<tr><th colspan="2" class="inconsistent"><strong>Dependencies Missing from Lockfile</strong></th></tr>
|
||||
{% endif %}
|
||||
<tr><th colspan="2" class="inconsistent"><strong>Missing Dependency:</strong> <a href="#dep_{{ name }}">{{ name }}</a></th></tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for requirement, packages in dependencies[name]|dictsort %}
|
||||
<tr>
|
||||
<td class="version">{{ requirement if requirement else '(empty)' }}</td>
|
||||
<td>
|
||||
{% for package_name in packages|sort %}
|
||||
{{ package_name }}<br/>
|
||||
{% endfor %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
<br/><br/><hr/><br/><br/>
|
||||
<a name="packages"/>
|
||||
<table class="condensed">
|
||||
<thead>
|
||||
<tr><th colspan="3"><strong>Packages Scanned for this Report</strong></th></tr>
|
||||
<tr><th>Package Analyzed</th><th>Package Version</th><th>Package Source</th></tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for name, info in packages|dictsort %}
|
||||
<tr>
|
||||
<td>{{ name }}</td>
|
||||
<td>{{ info.version }}</td>
|
||||
<td>{{ info.source }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</center>
|
||||
</body>
|
||||
</html>
|
|
@ -0,0 +1,173 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
import glob
|
||||
import os
|
||||
import argparse
|
||||
from collections import Counter
|
||||
from subprocess import check_call, CalledProcessError
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", ".."))
|
||||
|
||||
|
||||
def pip_command(command, additional_dir=".", error_ok=False):
|
||||
try:
|
||||
print("Executing: {} from {}".format(command, additional_dir))
|
||||
check_call(
|
||||
[sys.executable, "-m", "pip"] + command.split(),
|
||||
cwd=os.path.join(root_dir, additional_dir),
|
||||
)
|
||||
print()
|
||||
except CalledProcessError as err:
|
||||
print(err, file=sys.stderr)
|
||||
if not error_ok:
|
||||
sys.exit(1)
|
||||
|
||||
def select_install_type(pkg, run_develop, exceptions):
|
||||
# the default for disable_develop will be false, which means `run_develop` will be true
|
||||
argument = ""
|
||||
if run_develop:
|
||||
argument = "-e"
|
||||
|
||||
if pkg in exceptions:
|
||||
# opposite of whatever our decision was
|
||||
if argument == "":
|
||||
argument = "-e"
|
||||
elif argument == "-e":
|
||||
argument = ""
|
||||
|
||||
return argument
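# Resulting install flag, for illustration:
#   run_develop=True,  pkg not in exceptions -> "-e" (editable install)
#   run_develop=True,  pkg in exceptions     -> ""   (plain pip install)
#   run_develop=False, pkg not in exceptions -> ""
#   run_develop=False, pkg in exceptions     -> "-e"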
|
||||
|
||||
# optional argument in a situation where we want to build a variable subset of packages
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Set up the dev environment for selected packages."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--packageList",
|
||||
"-p",
|
||||
dest="packageList",
|
||||
default="",
|
||||
help="Comma separated list of targeted packages. Used to limit the number of packages that dependencies will be installed for.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--disabledevelop",
|
||||
dest="install_in_develop_mode",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="Add this argument if you would prefer to install the package with a simple `pip install` versus `pip install -e`",
|
||||
)
|
||||
# this is a hack to support generating docs for the single package that doesn't support develop mode. It will be removed when we
|
||||
# migrate to generating docs on a per-package cadence.
|
||||
parser.add_argument(
|
||||
"--exceptionlist",
|
||||
"-e",
|
||||
dest="exception_list",
|
||||
default="",
|
||||
help="Comma separated list of packages that we want to take the 'opposite' installation method for.",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
packages = {
|
||||
tuple(os.path.dirname(f).rsplit(os.sep, 1))
|
||||
for f in glob.glob("sdk/*/azure-*/setup.py") + glob.glob("tools/azure-*/setup.py")
|
||||
}
|
||||
# [(base_folder, package_name), ...] to {package_name: base_folder, ...}
|
||||
packages = {package_name: base_folder for (base_folder, package_name) in packages}
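# e.g. {"azure-core": "sdk/core", "azure-identity": "sdk/identity", ...} (names illustrative)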
|
||||
|
||||
exceptions = [p.strip() for p in args.exception_list.split(',')]
|
||||
|
||||
# keep targeted packages separate. python2 needs the nspkgs to work properly.
|
||||
if not args.packageList:
|
||||
targeted_packages = list(packages.keys())
|
||||
else:
|
||||
targeted_packages = [
|
||||
os.path.relpath(x.strip()) for x in args.packageList.split(",")
|
||||
]
|
||||
|
||||
# Extract nspkg and sort nspkg by number of "-"
|
||||
nspkg_packages = [p for p in packages.keys() if "nspkg" in p]
|
||||
nspkg_packages.sort(key=lambda x: len([c for c in x if c == "-"]))
|
||||
|
||||
# Manually push meta-packages at the end, in reverse dependency order
|
||||
meta_packages = ["azure-keyvault", "azure-mgmt", "azure"]
|
||||
|
||||
content_packages = sorted(
|
||||
[
|
||||
p
|
||||
for p in packages.keys()
|
||||
if p not in nspkg_packages + meta_packages and p in targeted_packages
|
||||
]
|
||||
)
|
||||
|
||||
# Install tests dep first
|
||||
if "azure-devtools" in content_packages:
|
||||
content_packages.remove("azure-devtools")
|
||||
content_packages.insert(0, "azure-devtools")
|
||||
|
||||
if "azure-sdk-tools" in content_packages:
|
||||
content_packages.remove("azure-sdk-tools")
|
||||
content_packages.insert(1, "azure-sdk-tools")
|
||||
|
||||
# Put azure-common in front of content package
|
||||
if "azure-common" in content_packages:
|
||||
content_packages.remove("azure-common")
|
||||
content_packages.insert(2, "azure-common")
|
||||
|
||||
if 'azure-core' in content_packages:
|
||||
content_packages.remove('azure-core')
|
||||
content_packages.insert(3, 'azure-core')
|
||||
|
||||
|
||||
print("Running dev setup...")
|
||||
print("Root directory '{}'\n".format(root_dir))
|
||||
|
||||
# install private whls if there are any
|
||||
privates_dir = os.path.join(root_dir, "privates")
|
||||
if os.path.isdir(privates_dir) and os.listdir(privates_dir):
|
||||
whl_list = " ".join(
|
||||
[os.path.join(privates_dir, f) for f in os.listdir(privates_dir)]
|
||||
)
|
||||
pip_command("install {}".format(whl_list))
|
||||
|
||||
# install nspkg only on py2, but in wheel mode (not editable mode)
|
||||
if sys.version_info < (3,):
|
||||
for package_name in nspkg_packages:
|
||||
pip_command("install {}/{}/".format(packages[package_name], package_name))
|
||||
|
||||
|
||||
|
||||
# install packages
|
||||
print("Packages to install: {}".format(content_packages))
|
||||
for package_name in content_packages:
|
||||
print("\nInstalling {}".format(package_name))
|
||||
# If we are running dev_setup with no arguments, going after dev_requirements would be a pointless exercise
|
||||
# and a waste of cycles, as all the dependencies will be installed regardless.
|
||||
if os.path.isfile(
|
||||
"{}/{}/dev_requirements.txt".format(packages[package_name], package_name)
|
||||
):
|
||||
pip_command(
|
||||
"install -r dev_requirements.txt",
|
||||
os.path.join(packages[package_name], package_name),
|
||||
)
|
||||
|
||||
pip_command(
|
||||
"install --ignore-requires-python {} {}".format(
|
||||
select_install_type(package_name, args.install_in_develop_mode, exceptions),
|
||||
os.path.join(packages[package_name], package_name)
|
||||
)
|
||||
)
|
||||
|
||||
# On Python 3, uninstall azure-nspkg if it got installed
|
||||
if sys.version_info >= (3,):
|
||||
pip_command("uninstall -y azure-nspkg", error_ok=True)
|
||||
|
||||
print("Finished dev setup.")
|
||||
|
|
@ -0,0 +1,87 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# Normally, this module will be executed as part of the devops build definitions.
|
||||
# An enterprising user can easily glance over this and leverage it for their own purposes.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import os
|
||||
|
||||
from common_tasks import process_glob_string, run_check_call
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
|
||||
build_packing_script_location = os.path.join(root_dir, "build_package.py")
|
||||
|
||||
|
||||
def build_packages(targeted_packages, distribution_directory):
|
||||
# run the build and distribution
|
||||
for package_name in targeted_packages:
|
||||
print(package_name)
|
||||
print("Generating Package Using Python {}".format(sys.version))
|
||||
run_check_call(
|
||||
[
|
||||
"python",
|
||||
build_packing_script_location,
|
||||
"--dest",
|
||||
distribution_directory,
|
||||
package_name,
|
||||
],
|
||||
root_dir,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Build Azure Packages, Called from DevOps YAML Pipeline"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--distribution-directory",
|
||||
dest="distribution_directory",
|
||||
help="The path to the distribution directory. Should be passed $(Build.ArtifactStagingDirectory) from the devops yaml definition.",
|
||||
required=True,
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"glob_string",
|
||||
nargs="?",
|
||||
help=(
|
||||
"A comma separated list of glob strings that will target the top level directories that contain packages. "
|
||||
'Examples: All == "azure-*", Single = "azure-keyvault"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--service",
|
||||
help=(
|
||||
"Name of service directory (under sdk/) to build."
|
||||
"Example: --service applicationinsights"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--pkgfilter",
|
||||
default="",
|
||||
dest="package_filter_string",
|
||||
help=(
|
||||
"An additional string used to filter the set of artifacts by a simple CONTAINS clause. This filters packages AFTER the set is built with compatibility and omission lists accounted."
|
||||
),
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# We need to support both CI builds of everything and individual service
|
||||
# folders. This logic allows us to do both.
|
||||
if args.service:
|
||||
service_dir = os.path.join("sdk", args.service)
|
||||
target_dir = os.path.join(root_dir, service_dir)
|
||||
else:
|
||||
target_dir = root_dir
|
||||
|
||||
targeted_packages = process_glob_string(args.glob_string, target_dir, args.package_filter_string)
|
||||
build_packages(targeted_packages, args.distribution_directory)
|
|
@ -0,0 +1,412 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# Below are common methods for the devops build steps. This is the common location that will be updated with
|
||||
# package targeting during release.
|
||||
|
||||
import glob
|
||||
from subprocess import check_call, CalledProcessError
|
||||
import os
|
||||
import errno
|
||||
import shutil
|
||||
import sys
|
||||
import logging
|
||||
import ast
|
||||
import textwrap
|
||||
import io
|
||||
import re
|
||||
import pdb
|
||||
|
||||
# Assumes the presence of setuptools
|
||||
from pkg_resources import parse_version, parse_requirements, Requirement
|
||||
|
||||
# this assumes the presence of "packaging"
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.version import Version
|
||||
|
||||
|
||||
DEV_REQ_FILE = "dev_requirements.txt"
|
||||
NEW_DEV_REQ_FILE = "new_dev_requirements.txt"
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
OMITTED_CI_PACKAGES = [
|
||||
"azure-mgmt-documentdb",
|
||||
"azure-servicemanagement-legacy",
|
||||
"azure-mgmt-scheduler",
|
||||
]
|
||||
MANAGEMENT_PACKAGE_IDENTIFIERS = [
|
||||
"mgmt",
|
||||
"azure-cognitiveservices",
|
||||
"azure-servicefabric",
|
||||
"nspkg",
|
||||
"azure-keyvault",
|
||||
"azure-synapse"
|
||||
]
|
||||
META_PACKAGES = ["azure", "azure-mgmt", "azure-keyvault"]
|
||||
REGRESSION_EXCLUDED_PACKAGES = [
|
||||
"azure-common",
|
||||
]
|
||||
|
||||
MANAGEMENT_PACKAGES_FILTER_EXCLUSIONS = [
|
||||
"azure-mgmt-core",
|
||||
]
|
||||
|
||||
omit_regression = (
|
||||
lambda x: "nspkg" not in x
|
||||
and "mgmt" not in x
|
||||
and os.path.basename(x) not in MANAGEMENT_PACKAGE_IDENTIFIERS
|
||||
and os.path.basename(x) not in META_PACKAGES
|
||||
and os.path.basename(x) not in REGRESSION_EXCLUDED_PACKAGES
|
||||
)
|
||||
omit_docs = lambda x: "nspkg" not in x and os.path.basename(x) not in META_PACKAGES
|
||||
omit_build = lambda x: x # Dummy lambda to match omit type
|
||||
lambda_filter_azure_pkg = lambda x: x.startswith("azure") and "-nspkg" not in x
|
||||
omit_mgmt = lambda x: "mgmt" not in x or os.path.basename(x) in MANAGEMENT_PACKAGES_FILTER_EXCLUSIONS
|
||||
|
||||
# dict of filter type and filter function
|
||||
omit_funct_dict = {
|
||||
"Build": omit_build,
|
||||
"Docs": omit_docs,
|
||||
"Regression": omit_regression,
|
||||
"Omit_management": omit_mgmt,
|
||||
}
|
||||
|
||||
def log_file(file_location, is_error=False):
|
||||
with open(file_location, "r") as file:
|
||||
for line in file:
|
||||
sys.stdout.write(line)
|
||||
sys.stdout.write("\n")
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
def read_file(file_location):
|
||||
str_buffer = ""
|
||||
with open(file_location, "r") as file:
|
||||
for line in file:
|
||||
str_buffer += line
|
||||
return str_buffer
|
||||
|
||||
|
||||
def cleanup_folder(target_folder):
|
||||
for file in os.listdir(target_folder):
|
||||
file_path = os.path.join(target_folder, file)
|
||||
try:
|
||||
if os.path.isfile(file_path):
|
||||
os.remove(file_path)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
|
||||
|
||||
# helper functions
|
||||
def clean_coverage(coverage_dir):
|
||||
try:
|
||||
os.mkdir(coverage_dir)
|
||||
except OSError as e:
|
||||
if e.errno == errno.EEXIST:
|
||||
logging.info("Coverage dir already exists. Cleaning.")
|
||||
cleanup_folder(coverage_dir)
|
||||
else:
|
||||
raise
|
||||
|
||||
|
||||
def parse_setup(setup_path):
|
||||
setup_filename = os.path.join(setup_path, "setup.py")
|
||||
mock_setup = textwrap.dedent(
|
||||
"""\
|
||||
def setup(*args, **kwargs):
|
||||
__setup_calls__.append((args, kwargs))
|
||||
"""
|
||||
)
|
||||
parsed_mock_setup = ast.parse(mock_setup, filename=setup_filename)
|
||||
with io.open(setup_filename, "r", encoding="utf-8-sig") as setup_file:
|
||||
parsed = ast.parse(setup_file.read())
|
||||
for index, node in enumerate(parsed.body[:]):
|
||||
if (
|
||||
not isinstance(node, ast.Expr)
|
||||
or not isinstance(node.value, ast.Call)
|
||||
or not hasattr(node.value.func, "id")
|
||||
or node.value.func.id != "setup"
|
||||
):
|
||||
continue
|
||||
parsed.body[index:index] = parsed_mock_setup.body
|
||||
break
|
||||
|
||||
fixed = ast.fix_missing_locations(parsed)
|
||||
codeobj = compile(fixed, setup_filename, "exec")
|
||||
local_vars = {}
|
||||
global_vars = {"__setup_calls__": []}
|
||||
current_dir = os.getcwd()
|
||||
working_dir = os.path.dirname(setup_filename)
|
||||
os.chdir(working_dir)
|
||||
exec(codeobj, global_vars, local_vars)
|
||||
os.chdir(current_dir)
|
||||
_, kwargs = global_vars["__setup_calls__"][0]
|
||||
|
||||
try:
|
||||
python_requires = kwargs["python_requires"]
|
||||
# most do not define this, fall back to what we define as universal
|
||||
except KeyError as e:
|
||||
python_requires = ">=2.7"
|
||||
|
||||
version = kwargs["version"]
|
||||
name = kwargs["name"]
|
||||
|
||||
requires = []
|
||||
if "install_requires" in kwargs:
|
||||
requires = kwargs["install_requires"]
|
||||
|
||||
return name, version, python_requires, requires
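# Illustrative usage (path and values hypothetical):
#   name, version, python_requires, requires = parse_setup("sdk/core/azure-core")
#   # -> ("azure-core", "1.4.0", ">=2.7", ["requests>=2.18.4", ...])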
|
||||
|
||||
|
||||
def parse_requirements_file(file_location):
|
||||
with open(file_location, "r") as f:
|
||||
reqs = f.read()
|
||||
|
||||
return dict((req.name, req) for req in parse_requirements(reqs))
|
||||
|
||||
|
||||
def parse_setup_requires(setup_path):
|
||||
_, _, python_requires, _ = parse_setup(setup_path)
|
||||
|
||||
return python_requires
|
||||
|
||||
|
||||
def filter_for_compatibility(package_set):
|
||||
collected_packages = []
|
||||
v = sys.version_info
|
||||
running_major_version = Version(".".join([str(v[0]), str(v[1]), str(v[2])]))
|
||||
|
||||
for pkg in package_set:
|
||||
spec_set = SpecifierSet(parse_setup_requires(pkg))
|
||||
|
||||
if running_major_version in spec_set:
|
||||
collected_packages.append(pkg)
|
||||
|
||||
return collected_packages
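# Example: running under Python 3.8, a package whose setup.py declares
# python_requires=">=3.6" is kept, while one declaring ">=2.7,<3.0" is dropped.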
|
||||
|
||||
|
||||
# this function is where a glob string gets translated to a list of packages
|
||||
# It is called by both BUILD (package) and TEST. In the future, this function will be the central location
|
||||
# for handling targeting of release packages
|
||||
def process_glob_string(
|
||||
glob_string,
|
||||
target_root_dir,
|
||||
additional_contains_filter="",
|
||||
filter_type="Build",
|
||||
):
|
||||
if glob_string:
|
||||
individual_globs = glob_string.split(",")
|
||||
else:
|
||||
individual_globs = "azure-*"
|
||||
collected_top_level_directories = []
|
||||
|
||||
for glob_string in individual_globs:
|
||||
globbed = glob.glob(
|
||||
os.path.join(target_root_dir, glob_string, "setup.py")
|
||||
) + glob.glob(os.path.join(target_root_dir, "sdk/*/", glob_string, "setup.py"))
|
||||
collected_top_level_directories.extend([os.path.dirname(p) for p in globbed])
|
||||
|
||||
# dedup, in case we have double coverage from the glob strings. Example: "azure-mgmt-keyvault,azure-mgmt-*"
|
||||
collected_directories = list(
|
||||
set(
|
||||
[
|
||||
p
|
||||
for p in collected_top_level_directories
|
||||
if additional_contains_filter in p
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
# if we have individually queued this specific package, it's obvious that we want to build it specifically
|
||||
# in this case, do not honor the omission list
|
||||
if len(collected_directories) == 1:
|
||||
pkg_set_ci_filtered = filter_for_compatibility(collected_directories)
|
||||
# however, if there are multiple packages being built, we should honor the omission list and NOT build the omitted
|
||||
# packages
|
||||
else:
|
||||
allowed_package_set = remove_omitted_packages(collected_directories)
|
||||
pkg_set_ci_filtered = filter_for_compatibility(allowed_package_set)
|
||||
|
||||
# Apply filter based on filter type, e.g. Docs, Regression, Management
|
||||
pkg_set_ci_filtered = list(filter(omit_funct_dict.get(filter_type, omit_build), pkg_set_ci_filtered))
|
||||
logging.info(
|
||||
"Target packages after filtering by CI: {}".format(
|
||||
pkg_set_ci_filtered
|
||||
)
|
||||
)
|
||||
logging.info(
|
||||
"Package(s) omitted by CI filter: {}".format(
|
||||
list(set(collected_directories) - set(pkg_set_ci_filtered))
|
||||
)
|
||||
)
|
||||
return sorted(pkg_set_ci_filtered)
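# Illustrative call (arguments hypothetical):
#   process_glob_string("azure-keyvault,azure-mgmt-*", "/path/to/repo", filter_type="Regression")
#   # -> sorted list of matching package directories, minus omitted and filtered ones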
|
||||
|
||||
|
||||
def remove_omitted_packages(collected_directories):
|
||||
packages = [
|
||||
package_dir
|
||||
for package_dir in collected_directories
|
||||
if os.path.basename(package_dir) not in OMITTED_CI_PACKAGES
|
||||
]
|
||||
|
||||
return packages
|
||||
|
||||
|
||||
def run_check_call(
|
||||
command_array,
|
||||
working_directory,
|
||||
acceptable_return_codes=[],
|
||||
run_as_shell=False,
|
||||
always_exit=True,
|
||||
):
|
||||
try:
|
||||
if run_as_shell:
|
||||
logging.info(
|
||||
"Command Array: {0}, Target Working Directory: {1}".format(
|
||||
" ".join(command_array), working_directory
|
||||
)
|
||||
)
|
||||
check_call(" ".join(command_array), cwd=working_directory, shell=True)
|
||||
else:
|
||||
logging.info(
|
||||
"Command Array: {0}, Target Working Directory: {1}".format(
|
||||
command_array, working_directory
|
||||
)
|
||||
)
|
||||
check_call(command_array, cwd=working_directory)
|
||||
except CalledProcessError as err:
|
||||
if err.returncode not in acceptable_return_codes:
|
||||
logging.error(err)
|
||||
if always_exit:
|
||||
exit(1)
|
||||
else:
|
||||
return err
|
||||
|
||||
|
||||
# This function generates code coverage parameters
|
||||
def create_code_coverage_params(parsed_args, package_name):
|
||||
coverage_args = []
|
||||
if parsed_args.disablecov:
|
||||
logging.info("Code coverage disabled as per the flag(--disablecov)")
|
||||
coverage_args.append("--no-cov")
|
||||
else:
|
||||
current_package_name = package_name.replace("-", ".")
|
||||
coverage_args.append("--cov={}".format(current_package_name))
|
||||
logging.info(
|
||||
"Code coverage is enabled for package {0}, pytest arguements: {1}".format(
|
||||
current_package_name, coverage_args
|
||||
)
|
||||
)
|
||||
return coverage_args
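# Example: for package "azure-storage-blob" with coverage enabled this returns
# ["--cov=azure.storage.blob"]; with --disablecov it returns ["--no-cov"].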
|
||||
|
||||
|
||||
# This function returns whether error code 5 is allowed for a given package
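# (pytest exits with return code 5 when no tests are collected; for management-plane
# packages an empty test run is treated as acceptable here.)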
|
||||
def is_error_code_5_allowed(target_pkg, pkg_name):
|
||||
if (
|
||||
all(
|
||||
map(
|
||||
lambda x: any(
|
||||
[pkg_id in x for pkg_id in MANAGEMENT_PACKAGE_IDENTIFIERS]
|
||||
),
|
||||
[target_pkg],
|
||||
)
|
||||
)
|
||||
or pkg_name in MANAGEMENT_PACKAGE_IDENTIFIERS
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
# This function parses requirement and return package name and specifier
|
||||
def parse_require(req):
|
||||
req_object = Requirement.parse(req)
|
||||
pkg_name = req_object.key
|
||||
spec = SpecifierSet(str(req_object).replace(pkg_name, ""))
|
||||
return [pkg_name, spec]
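# Illustrative example:
#   parse_require("azure-core<2.0.0,>=1.2.0")
#   # -> ["azure-core", SpecifierSet("<2.0.0,>=1.2.0")]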
|
||||
|
||||
def find_whl(package_name, version, whl_directory):
|
||||
if not os.path.exists(whl_directory):
|
||||
logging.error("Whl directory is incorrect")
|
||||
exit(1)
|
||||
|
||||
logging.info("Searching whl for package {}".format(package_name))
|
||||
whl_name = "{0}-{1}*.whl".format(package_name.replace("-", "_"), version)
|
||||
paths = glob.glob(os.path.join(whl_directory, whl_name))
|
||||
if not paths:
|
||||
logging.error(
|
||||
"whl is not found in whl directory {0} for package {1}".format(
|
||||
whl_directory, package_name
|
||||
)
|
||||
)
|
||||
exit(1)
|
||||
|
||||
return paths[0]
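# Example (paths hypothetical): find_whl("azure-core", "1.2.0", "/tmp/wheels") looks for
# "/tmp/wheels/azure_core-1.2.0*.whl" and returns the first match.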
|
||||
|
||||
# This method installs package from a pre-built whl
|
||||
def install_package_from_whl(
|
||||
package_whl_path, working_dir, python_sym_link=sys.executable
|
||||
):
|
||||
commands = [python_sym_link, "-m", "pip", "install", package_whl_path]
|
||||
run_check_call(commands, working_dir)
|
||||
logging.info("Installed package from {}".format(package_whl_path))
|
||||
|
||||
|
||||
def filter_dev_requirements(pkg_root_path, packages_to_exclude, dest_dir):
|
||||
# This method returns list of requirements from dev_requirements by filtering out packages in given list
|
||||
dev_req_path = os.path.join(pkg_root_path, DEV_REQ_FILE)
|
||||
if not os.path.exists(dev_req_path):
|
||||
logging.info("{0} is not found in package root {1}".format(DEV_REQ_FILE, pkg_root_path))
|
||||
return ""
|
||||
|
||||
requirements = []
|
||||
with open(dev_req_path, "r") as dev_req_file:
|
||||
requirements = dev_req_file.readlines()
|
||||
|
||||
# filter any package given in excluded list
|
||||
requirements = [
|
||||
req
|
||||
for req in requirements
|
||||
if os.path.basename(req.replace("\n", "")) not in packages_to_exclude
|
||||
]
|
||||
|
||||
logging.info("Filtered dev requirements: {}".format(requirements))
|
||||
# create new dev requirements file with different name for filtered requirements
|
||||
new_dev_req_path = os.path.join(dest_dir, NEW_DEV_REQ_FILE)
|
||||
with open(new_dev_req_path, "w") as dev_req_file:
|
||||
dev_req_file.writelines(requirements)
|
||||
|
||||
return new_dev_req_path
|
||||
|
||||
def is_required_version_on_pypi(package_name, spec):
|
||||
from pypi_tools.pypi import PyPIClient
|
||||
client = PyPIClient()
|
||||
versions = []
|
||||
try:
|
||||
versions = [str(v) for v in client.get_ordered_versions(package_name) if str(v) in spec]
|
||||
except:
|
||||
logging.error("Package {} is not found on PyPI", package_name)
|
||||
return versions
|
||||
|
||||
def find_packages_missing_on_pypi(path):
|
||||
import pkginfo
|
||||
requires = []
|
||||
if path.endswith(".whl"):
|
||||
requires = list(filter(lambda_filter_azure_pkg, pkginfo.get_metadata(path).requires_dist))
|
||||
else:
|
||||
_, _, _, requires = parse_setup(path)
|
||||
|
||||
# parse pkg name and spec
|
||||
pkg_spec_dict = dict(parse_require(req) for req in requires)
|
||||
logging.info("Package requirement: {}".format(pkg_spec_dict))
|
||||
# find if version is available on pypi
|
||||
missing_packages = ["{0}{1}".format(pkg, pkg_spec_dict[pkg]) for pkg in pkg_spec_dict.keys() if not is_required_version_on_pypi(pkg, pkg_spec_dict[pkg])]
|
||||
if missing_packages:
|
||||
logging.error("Packages not found on PyPI: {}".format(missing_packages))
|
||||
return missing_packages
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
|
||||
param (
|
||||
$workingDir,
|
||||
$version
|
||||
)
|
||||
$RELEASE_TITLE_REGEX = "(?<releaseNoteTitle>^\#+.*(?<version>\b\d+\.\d+\.\d+([^0-9\s][^\s:]+)?))"
|
||||
|
||||
|
||||
function ExtractReleaseNotes($changeLogLocation)
|
||||
{
|
||||
$releaseNotes = @{}
|
||||
$contentArrays = @{}
|
||||
if ($changeLogLocation.Length -eq 0)
|
||||
{
|
||||
return $releaseNotes
|
||||
}
|
||||
|
||||
try {
|
||||
$contents = Get-Content $changeLogLocation
|
||||
|
||||
# walk the document, finding where the version specifiers are and creating lists
|
||||
$version = ""
|
||||
Write-Host "Versions in change log $changeLogLocation"
|
||||
foreach($line in $contents){
|
||||
if ($line -match $RELEASE_TITLE_REGEX)
|
||||
{
|
||||
Write-Host $line
|
||||
$version = $matches["version"]
|
||||
$contentArrays[$version] = @()
|
||||
}
|
||||
|
||||
$contentArrays[$version] += $line
|
||||
}
|
||||
|
||||
# resolve each of discovered version specifier string arrays into real content
|
||||
foreach($key in $contentArrays.Keys)
|
||||
{
|
||||
$releaseNotes[$key] = New-Object PSObject -Property @{
|
||||
ReleaseVersion = $key
|
||||
ReleaseContent = $contentArrays[$key] -join [Environment]::NewLine
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
Write-Host "Error parsing $changeLogLocation."
|
||||
Write-Host $_.Exception.Message
|
||||
}
|
||||
|
||||
return $releaseNotes
|
||||
}
|
||||
|
||||
|
||||
function VerifyPackages($rootDirectory)
|
||||
{
|
||||
#This function verifies version in CHANGELOG.md for a given package
|
||||
try
|
||||
{
|
||||
$historyFiles = Get-ChildItem -Path $rootDirectory -Recurse -Include "CHANGELOG.md"
|
||||
if ($historyFiles -eq $null)
|
||||
{
|
||||
Write-Host "Change log file is missing for package"
|
||||
exit(1)
|
||||
}
|
||||
|
||||
#Find current version of package from _version.py and package name from setup.py
|
||||
$changeFile = @($historyFiles)[0]
|
||||
#Get Version and release notes in each change log files
|
||||
$releaseNotes = ExtractReleaseNotes -changeLogLocation $changeFile
|
||||
if ($releaseNotes.Count -gt 0)
|
||||
{
|
||||
#Log package if it doesn't have current version in change log
|
||||
if ( $releaseNotes.Contains($version))
|
||||
{
|
||||
$content = $releaseNotes[$version]
|
||||
Write-Host "Change log [$changeFile] is updated with current version $version"
|
||||
Write-Host "Release notes for version $version"
|
||||
Write-Host "****************************************************************************************************"
|
||||
Write-Host $content.ReleaseContent
|
||||
Write-Host "****************************************************************************************************"
|
||||
}
|
||||
else
|
||||
{
|
||||
Write-Host "Change log [$changeFile] does not have current version $version"
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
Write-Host "Error verifying version in change log"
|
||||
Write-Host $_.Exception.Message
|
||||
exit(1)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
if (($workingDir -eq $null) -or ($version -eq $null))
|
||||
{
|
||||
Write-Host "Invalid arguements. workingDir and version are mandatory arguements"
|
||||
exit(1)
|
||||
}
|
||||
|
||||
VerifyPackages -rootDirectory $workingDir
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
from packaging.version import parse
|
||||
from common_tasks import run_check_call
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
# Old releases of SDK packages that should be skipped
|
||||
EXCLUDED_PACKAGE_VERSIONS = {
|
||||
'azure-storage-file-share': ['12.0.0', '12.0.0b5'],
|
||||
'azure-storage-queue': ['0.37.0', '1.0.0', '1.1.0', '1.2.0rc1', '1.3.0', '1.4.0', '2.0.0', '2.0.1', '2.1.0'],
|
||||
'azure-storage-file': ['0.37.0', '1.0.0', '1.1.0', '1.2.0rc1', '1.3.0', '1.3.1', '1.4.0', '2.0.0', '2.0.1', '2.1.0'],
|
||||
'azure-storage-blob': ['0.37.0', '0.37.1', '1.0.0', '1.1.0', '1.2.0rc1', '1.3.0', '1.3.1', '1.4.0', '1.5.0', '2.0.0', '2.0.1', '2.1.0',],
|
||||
'azure-eventhub': ['0.2.0', '1.0.0', '1.1.0', '1.1.1', '1.2.0rc1', '1.2.0', '1.3.0', '1.3.1', '1.3.2', '1.3.3',],
|
||||
'azure-cosmos': ['3.0.0', '3.0.1', '3.0.2', '3.1.0', '3.1.1', '3.1.2'],
|
||||
}
|
||||
|
||||
# This method identifies release tag for latest or oldest released version of a given package
|
||||
def get_release_tag(dep_pkg_name, isLatest):
|
||||
# get versions from pypi and find latest
|
||||
# delayed import until sdk tools are installed on virtual env
|
||||
from pypi_tools.pypi import PyPIClient
|
||||
|
||||
client = PyPIClient()
|
||||
versions = []
|
||||
try:
|
||||
versions = [str(v) for v in client.get_ordered_versions(dep_pkg_name)]
|
||||
logging.info("Versions available on PyPI for {0} are: {1}".format(dep_pkg_name, versions))
|
||||
except:
|
||||
logging.error("Package {} is not available on PyPI".format(dep_pkg_name))
|
||||
return None
|
||||
|
||||
# filter excluded versions
|
||||
if dep_pkg_name in EXCLUDED_PACKAGE_VERSIONS:
|
||||
versions = [v for v in versions if v not in EXCLUDED_PACKAGE_VERSIONS[dep_pkg_name]]
|
||||
logging.info("Filtered versions for {0} is: {1}".format(dep_pkg_name, versions))
|
||||
|
||||
if not versions:
|
||||
logging.info(
|
||||
"Released version info for package {} is not available".format(dep_pkg_name)
|
||||
)
|
||||
# This is not a hard error. We can run into this situation when a new package is added to repo and not yet released
|
||||
return
|
||||
|
||||
# find latest version
|
||||
logging.info("Looking for {} released version".format("Latest" if isLatest == True else "Oldest"))
|
||||
if isLatest:
|
||||
versions.reverse()
|
||||
else:
|
||||
# find oldest GA version by filtering out all preview versions
|
||||
versions = [v for v in versions if not parse(v).is_prerelease]
|
||||
if len(versions) < 2:
|
||||
logging.info("Only one or no released GA version found for package {}".format(dep_pkg_name))
|
||||
return
|
||||
|
||||
version = versions[0]
|
||||
|
||||
# create tag in <pkg_name>_version format
|
||||
tag_name = "{0}_{1}".format(dep_pkg_name, version)
|
||||
logging.info(
|
||||
"Release tag for package [{0}] is [{1}]".format(dep_pkg_name, tag_name)
|
||||
)
|
||||
return tag_name
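# Example (version hypothetical): get_release_tag("azure-identity", isLatest=True)
# might return "azure-identity_1.3.0", depending on what is published on PyPI.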
|
||||
|
||||
|
||||
# This method checks out a given tag of the sdk repo
|
||||
def git_checkout_tag(tag_name, working_dir):
|
||||
# fetch tags
|
||||
run_check_call(["git", "fetch", "origin", "tag", tag_name], working_dir)
|
||||
|
||||
logging.info("checkout git repo with tag {}".format(tag_name))
|
||||
commands = ["git", "checkout", "tags/{}".format(tag_name)]
|
||||
run_check_call(commands, working_dir)
|
||||
logging.info("Code with tag {} is checked out successfully".format(tag_name))
|
||||
|
||||
|
||||
# This method checks out a given branch of the sdk repo
|
||||
def git_checkout_branch(branch_name, working_dir):
|
||||
# fetch branch
|
||||
run_check_call(["git", "fetch", "origin", branch_name], working_dir)
|
||||
try:
|
||||
run_check_call(["git", "branch", branch_name, "FETCH_HEAD"], working_dir)
|
||||
except:
|
||||
logging.error("Failed to create branch. But this can happen if a branch already exists so ignoring this error")
|
||||
logging.info("checkout git repo with branch {}".format(branch_name))
|
||||
commands = ["git", "checkout", branch_name]
|
||||
run_check_call(commands, working_dir)
|
||||
logging.info("Repo with branch name {} is checked out successfully".format(branch_name))
|
||||
|
||||
|
||||
def clone_repo(dest_dir, repo_url):
|
||||
if not os.path.isdir(dest_dir):
|
||||
logging.error(
|
||||
"Invalid destination directory to clone git repo:[{}]".format(dest_dir)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
logging.info("cloning git repo using url {}".format(repo_url))
|
||||
run_check_call(["git", "clone", "--depth=1", repo_url], dest_dir)
|
|
@ -0,0 +1,311 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# This script is the primary entry point for the azure-sdk-for-python Devops CI commands
|
||||
# Primarily, it figures out which packages need to be built by combining the targeting string with the servicedir argument.
|
||||
# After this, it either executes a global install of all packages followed by a test, or a tox invocation per package collected.
|
||||
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import os
|
||||
import errno
|
||||
import shutil
|
||||
import glob
|
||||
import logging
|
||||
import pdb
|
||||
from common_tasks import (
|
||||
process_glob_string,
|
||||
run_check_call,
|
||||
cleanup_folder,
|
||||
clean_coverage,
|
||||
is_error_code_5_allowed,
|
||||
create_code_coverage_params,
|
||||
)
|
||||
from tox_harness import prep_and_run_tox
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
|
||||
coverage_dir = os.path.join(root_dir, "_coverage/")
|
||||
dev_setup_script_location = os.path.join(root_dir, "scripts/dev_setup.py")
|
||||
|
||||
def combine_coverage_files(coverage_files):
|
||||
# find tox.ini file. tox.ini is used to combine coverage paths to generate formatted report
|
||||
tox_ini_file = os.path.join(root_dir, "eng", "tox", "tox.ini")
|
||||
config_file_flag = "--rcfile={}".format(tox_ini_file)
|
||||
|
||||
if os.path.isfile(tox_ini_file):
|
||||
# for every individual coverage file, run coverage combine to combine path
|
||||
for coverage_file in coverage_files:
|
||||
cov_cmd_array = [sys.executable, "-m", "coverage", "combine"]
|
||||
# tox.ini file has coverage paths to combine
|
||||
# Pass tox.ini as the coverage config file
|
||||
cov_cmd_array.extend([config_file_flag, coverage_file])
|
||||
run_check_call(cov_cmd_array, root_dir)
|
||||
else:
|
||||
# not a hard error at this point
|
||||
# this combine step is required only when the report has package names starting with .tox
|
||||
logging.error("tox.ini is not found in path {}".format(root_dir))
|
||||
|
||||
def collect_pytest_coverage_files(targeted_packages):
|
||||
coverage_files = []
|
||||
# generate coverage files
|
||||
for package_dir in targeted_packages:
|
||||
coverage_file = os.path.join(
|
||||
coverage_dir, ".coverage_{}".format(os.path.basename(package_dir))
|
||||
)
|
||||
if os.path.isfile(coverage_file):
|
||||
coverage_files.append(coverage_file)
|
||||
|
||||
logging.info("Visible uncombined .coverage files: {}".format(coverage_files))
|
||||
|
||||
if len(coverage_files):
|
||||
cov_cmd_array = ["coverage", "combine"]
|
||||
cov_cmd_array.extend(coverage_files)
|
||||
|
||||
# merge them with coverage combine and copy to root
|
||||
run_check_call(cov_cmd_array, coverage_dir)
|
||||
|
||||
source = os.path.join(coverage_dir, "./.coverage")
|
||||
dest = os.path.join(root_dir, ".coverage")
|
||||
|
||||
shutil.move(source, dest)
|
||||
|
||||
|
||||
def prep_tests(targeted_packages):
|
||||
logging.info("running test setup for {}".format(targeted_packages))
|
||||
run_check_call(
|
||||
[
|
||||
sys.executable,
|
||||
dev_setup_script_location,
|
||||
"--disabledevelop",
|
||||
"-p",
|
||||
",".join([os.path.basename(p) for p in targeted_packages])
|
||||
],
|
||||
root_dir,
|
||||
)
|
||||
|
||||
|
||||
def run_tests(targeted_packages, test_output_location, test_res, parsed_args):
|
||||
err_results = []
|
||||
|
||||
clean_coverage(coverage_dir)
|
||||
|
||||
# base command array without a targeted package
|
||||
command_array = [sys.executable, "-m", "pytest"]
|
||||
command_array.extend(test_res)
|
||||
|
||||
# loop through the packages
|
||||
logging.info("Running pytest for {}".format(targeted_packages))
|
||||
|
||||
for index, target_package in enumerate(targeted_packages):
|
||||
logging.info(
|
||||
"Running pytest for {}. {} of {}.".format(
|
||||
target_package, index + 1, len(targeted_packages)
|
||||
)
|
||||
)
|
||||
|
||||
package_name = os.path.basename(target_package)
|
||||
source_coverage_file = os.path.join(root_dir, ".coverage")
|
||||
target_coverage_file = os.path.join(
|
||||
coverage_dir, ".coverage_{}".format(package_name)
|
||||
)
|
||||
target_package_options = []
|
||||
allowed_return_codes = []
|
||||
|
||||
local_command_array = command_array[:]
|
||||
|
||||
# Get code coverage params for current package
|
||||
coverage_commands = create_code_coverage_params(parsed_args, package_name)
|
||||
# Create local copy of params to pass to execute
|
||||
local_command_array.extend(coverage_commands)
|
||||
|
||||
# if we are targeting only packages that are management plane, it is a possibility
|
||||
# that no tests running is an acceptable situation
|
||||
# we explicitly handle this here.
|
||||
if is_error_code_5_allowed(target_package, package_name):
|
||||
allowed_return_codes.append(5)
|
||||
|
||||
# format test result output location
|
||||
if test_output_location:
|
||||
target_package_options.extend(
|
||||
[
|
||||
"--junitxml",
|
||||
os.path.join(
|
||||
"TestResults/{}/".format(package_name), test_output_location
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
target_package_options.append(target_package)
|
||||
err_result = run_check_call(
|
||||
local_command_array + target_package_options,
|
||||
root_dir,
|
||||
allowed_return_codes,
|
||||
True,
|
||||
False,
|
||||
)
|
||||
if err_result:
|
||||
logging.error("Errors present in {}".format(package_name))
|
||||
err_results.append(err_result)
|
||||
|
||||
if os.path.isfile(source_coverage_file):
|
||||
shutil.move(source_coverage_file, target_coverage_file)
|
||||
|
||||
if not parsed_args.disablecov:
|
||||
collect_pytest_coverage_files(targeted_packages)
|
||||
|
||||
# if any of the packages failed, we should get exit with errors
|
||||
if err_results:
|
||||
exit(1)
|
||||
|
||||
|
||||
def execute_global_install_and_test(
|
||||
parsed_args, targeted_packages, extended_pytest_args
|
||||
):
|
||||
if parsed_args.mark_arg:
|
||||
extended_pytest_args.extend(["-m", '"{}"'.format(parsed_args.mark_arg)])
|
||||
|
||||
if parsed_args.runtype == "setup" or parsed_args.runtype == "all":
|
||||
prep_tests(targeted_packages)
|
||||
|
||||
if parsed_args.runtype == "execute" or parsed_args.runtype == "all":
|
||||
run_tests(
|
||||
targeted_packages,
|
||||
parsed_args.test_results,
|
||||
extended_pytest_args,
|
||||
parsed_args,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Install Dependencies, Install Packages, Test Azure Packages, Called from DevOps YAML Pipeline"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"glob_string",
|
||||
nargs="?",
|
||||
help=(
|
||||
"A comma separated list of glob strings that will target the top level directories that contain packages."
|
||||
'Examples: All = "azure-*", Single = "azure-keyvault", Targeted Multiple = "azure-keyvault,azure-mgmt-resource"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--junitxml",
|
||||
dest="test_results",
|
||||
help=(
|
||||
"The folder where the test results will be stored in xml format."
|
||||
'Example: --junitxml="junit/test-results.xml"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--mark_arg",
|
||||
dest="mark_arg",
|
||||
help=(
|
||||
'The complete argument for `pytest -m "<input>"`. This can be used to exclude or include specific pytest markers.'
|
||||
'--mark_arg="not cosmosEmulator"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--disablecov", help=("Flag that disables code coverage."), action="store_true"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--tparallel",
|
||||
default=False,
|
||||
help=("Flag that enables parallel tox invocation."),
|
||||
action="store_true",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--tenvparallel",
|
||||
default=False,
|
||||
help=("Run individual tox env for each package in parallel."),
|
||||
action="store_true",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--service",
|
||||
help=(
|
||||
"Name of service directory (under sdk/) to test."
|
||||
"Example: --service applicationinsights"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-r", "--runtype", choices=["setup", "execute", "all", "none"], default="none"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-t",
|
||||
"--toxenv",
|
||||
dest="tox_env",
|
||||
help="Specific set of named environments to execute",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-w",
|
||||
"--wheel_dir",
|
||||
dest="wheel_dir",
|
||||
help="Location for prebuilt artifacts (if any)",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-x",
|
||||
"--xdist",
|
||||
default=False,
|
||||
help=("Flag that enables xdist (requires pip install)"),
|
||||
action="store_true"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"-i",
|
||||
"--injected-packages",
|
||||
dest="injected_packages",
|
||||
default="",
|
||||
help="Comma or space-separated list of packages that should be installed prior to dev_requirements. If local path, should be absolute.",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--omit-management",
|
||||
dest="omit_management",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Flag that indicates to omit any management packages except any management packages that should not be filtered. for e.g azure-mgmt-core",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# We need to support both CI builds of everything and individual service
|
||||
# folders. This logic allows us to do both.
|
||||
if args.service:
|
||||
service_dir = os.path.join("sdk", args.service)
|
||||
target_dir = os.path.join(root_dir, service_dir)
|
||||
else:
|
||||
target_dir = root_dir
|
||||
|
||||
if args.omit_management:
|
||||
targeted_packages = process_glob_string(args.glob_string, target_dir, "", "Omit_management")
|
||||
else:
|
||||
targeted_packages = process_glob_string(args.glob_string, target_dir)
|
||||
extended_pytest_args = []
|
||||
|
||||
if len(targeted_packages) == 0:
|
||||
exit(0)
|
||||
|
||||
if args.xdist:
|
||||
extended_pytest_args.extend(["-n", "8", "--dist=loadscope"])
|
||||
|
||||
if args.runtype != "none":
|
||||
execute_global_install_and_test(args, targeted_packages, extended_pytest_args)
|
||||
else:
|
||||
prep_and_run_tox(targeted_packages, args, extended_pytest_args)
|
|
@@ -0,0 +1,389 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# This script will run regression test for packages which are added as required package by other packages
|
||||
# Regression test ensures backward compatibility with released dependent package versions
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
from common_tasks import (
|
||||
process_glob_string,
|
||||
parse_setup,
|
||||
run_check_call,
|
||||
parse_require,
|
||||
install_package_from_whl,
|
||||
filter_dev_requirements,
|
||||
find_packages_missing_on_pypi,
|
||||
find_whl
|
||||
)
|
||||
from git_helper import get_release_tag, git_checkout_tag, git_checkout_branch, clone_repo
|
||||
from pip._internal.operations import freeze
|
||||
|
||||
AZURE_GLOB_STRING = "azure*"
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
|
||||
test_tools_req_file = os.path.abspath(os.path.join(root_dir, "eng", "test_tools.txt"))
|
||||
|
||||
GIT_REPO_NAME = "azure-sdk-for-python"
|
||||
GIT_MASTER_BRANCH = "master"
|
||||
VENV_NAME = "regressionenv"
|
||||
AZURE_SDK_FOR_PYTHON_GIT_URL = "https://github.com/Azure/azure-sdk-for-python.git"
|
||||
TEMP_FOLDER_NAME = ".tmp_code_path"
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
|
||||
class CustomVirtualEnv:
|
||||
def __init__(self, path):
|
||||
self.path = os.path.join(path, VENV_NAME)
|
||||
|
||||
def create(self):
|
||||
logging.info("Creating virtual environment [{}]".format(self.path))
|
||||
run_check_call([sys.executable, "-m", "venv", "ENV_DIR", self.path], root_dir)
|
||||
self.python_executable = self._find_python_executable()
|
||||
self.lib_paths = self._find_lib_paths()
|
||||
|
||||
def clear_venv(self):
|
||||
# clear any previously installed packages
|
||||
run_check_call(
|
||||
[sys.executable, "-m", "venv", "--clear", "ENV_DIR", self.path], root_dir
|
||||
)
|
||||
|
||||
def _find_python_executable(self):
|
||||
paths = glob.glob(os.path.join(self.path, "*", "python")) + glob.glob(os.path.join(self.path, "*", "python.exe"))
|
||||
if not paths:
|
||||
logging.error("Failed to find path to python executable in virtual env:{}".format(self.path))
|
||||
sys.exit(1)
|
||||
return paths[0]
|
||||
|
||||
def _find_lib_paths(self):
|
||||
paths = glob.glob(os.path.join(self.path, "*", "site-packages")) + glob.glob(os.path.join(self.path, "lib", "*", "site-packages"))
|
||||
if not paths:
|
||||
logging.error("Failed to find site-packages directory in virtual env:{}".format(self.path))
|
||||
sys.exit(1)
|
||||
return paths
|
||||
|
||||
|
||||
class RegressionContext:
|
||||
def __init__(self, whl_dir, tmp_path, is_latest, pytest_mark_arg):
|
||||
self.whl_directory = whl_dir
|
||||
self.temp_path = tmp_path
|
||||
self.is_latest_depend_test = is_latest
|
||||
self.venv = CustomVirtualEnv(self.temp_path)
|
||||
self.pytest_mark_arg = pytest_mark_arg
|
||||
self.venv.create()
|
||||
|
||||
def init_for_pkg(self, pkg_root):
|
||||
# This method is called each time context is switched to test regression for new package
|
||||
self.package_root_path = pkg_root
|
||||
self.package_name, self.pkg_version, _, _ = parse_setup(self.package_root_path)
|
||||
|
||||
def initialize(self, dep_pkg_root_path):
|
||||
self.dep_pkg_root_path = dep_pkg_root_path
|
||||
self.venv.clear_venv()
|
||||
|
||||
def deinitialize(self, dep_pkg_root_path):
|
||||
# This function can be used to reset code repo to master branch
|
||||
# Revert to master branch
|
||||
run_check_call(["git", "clean", "-fd"], dep_pkg_root_path)
|
||||
run_check_call(["git", "checkout", GIT_MASTER_BRANCH], dep_pkg_root_path)
|
||||
|
||||
|
||||
class RegressionTest:
|
||||
def __init__(self, context, package_dependency_dict):
|
||||
self.context = context
|
||||
self.package_dependency_dict = package_dependency_dict
|
||||
|
||||
def run(self):
|
||||
pkg_name = self.context.package_name
|
||||
if pkg_name in self.package_dependency_dict:
|
||||
logging.info("Running regression test for {}".format(pkg_name))
|
||||
self.whl_path = find_whl(pkg_name, self.context.pkg_version, self.context.whl_directory)
|
||||
if find_packages_missing_on_pypi(self.whl_path):
|
||||
logging.error("Required packages are not available on PyPI. Skipping regression test")
|
||||
exit(0)
|
||||
|
||||
dep_packages = self.package_dependency_dict[pkg_name]
|
||||
logging.info("Dependent packages for [{0}]: {1}".format(pkg_name, dep_packages))
|
||||
for dep_pkg_path in dep_packages:
|
||||
dep_pkg_name, _, _, _ = parse_setup(dep_pkg_path)
|
||||
logging.info(
|
||||
"Starting regression test of {0} against released {1}".format(
|
||||
pkg_name, dep_pkg_name
|
||||
)
|
||||
)
|
||||
self._run_test(dep_pkg_path)
|
||||
logging.info(
|
||||
"Completed regression test of {0} against released {1}".format(
|
||||
pkg_name, dep_pkg_name
|
||||
)
|
||||
)
|
||||
|
||||
logging.info("Completed regression test for {}".format(pkg_name))
|
||||
else:
|
||||
logging.info(
|
||||
"Package {} is not added as required by any package".format(pkg_name)
|
||||
)
|
||||
|
||||
def _run_test(self, dep_pkg_path):
|
||||
self.context.initialize(dep_pkg_path)
|
||||
|
||||
# find GA released tags for package and run test using that code base
|
||||
dep_pkg_name, version, _, _ = parse_setup(dep_pkg_path)
|
||||
release_tag = get_release_tag(dep_pkg_name, self.context.is_latest_depend_test)
|
||||
if not release_tag:
|
||||
logging.error("Release tag is not available. Skipping package {} from test".format(dep_pkg_name))
|
||||
return
|
||||
|
||||
test_branch_name = "{0}_tests".format(release_tag)
|
||||
try:
|
||||
git_checkout_branch(test_branch_name, dep_pkg_path)
|
||||
except:
|
||||
# If git checkout failed for "tests" branch then checkout branch with release tag
|
||||
logging.info("Failed to checkout branch {}. Checking out release tagged git repo".format(test_branch_name))
|
||||
git_checkout_tag(release_tag, dep_pkg_path)
|
||||
|
||||
try:
|
||||
# install packages required to run tests
|
||||
run_check_call(
|
||||
[
|
||||
self.context.venv.python_executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-r",
|
||||
test_tools_req_file,
|
||||
],
|
||||
dep_pkg_path
|
||||
)
|
||||
|
||||
# Install pre-built whl for current package
|
||||
install_package_from_whl(
|
||||
self.whl_path,
|
||||
self.context.temp_path,
|
||||
self.context.venv.python_executable,
|
||||
)
|
||||
# install package to be tested and run pytest
|
||||
self._execute_test(dep_pkg_path)
|
||||
finally:
|
||||
self.context.deinitialize(dep_pkg_path)
|
||||
|
||||
def _execute_test(self, dep_pkg_path):
|
||||
# install dependent package from source
|
||||
self._install_packages(dep_pkg_path, self.context.package_name)
|
||||
|
||||
# Ensure correct version of package is installed
|
||||
if not self._is_package_installed(self.context.package_name, self.context.pkg_version):
|
||||
logging.error("Incorrect version of package {0} is installed. Expected version {1}".format(self.context.package_name, self.context.pkg_version))
|
||||
sys.exit(1)
|
||||
|
||||
logging.info("Running test for {}".format(dep_pkg_path))
|
||||
commands = [
|
||||
self.context.venv.python_executable,
|
||||
"-m",
|
||||
"pytest",
|
||||
"--verbose",
|
||||
"--durations",
|
||||
"10",
|
||||
]
|
||||
|
||||
# add any pytest mark arg if present, e.g. 'not cosmosEmulator'
|
||||
if self.context.pytest_mark_arg:
|
||||
commands.extend(["-m", self.context.pytest_mark_arg])
|
||||
|
||||
commands.append(self._get_package_test_dir(dep_pkg_path))
|
||||
run_check_call(commands, self.context.temp_path)
|
||||
|
||||
def _get_package_test_dir(self, pkg_root_path):
|
||||
# Returns path to test or tests folder within package root directory.
|
||||
paths = glob.glob(os.path.join(pkg_root_path, "test")) + glob.glob(os.path.join(pkg_root_path, "tests"))
|
||||
if not paths:
|
||||
# We will run into this situation only if test and tests are missing in repo.
|
||||
# For now, run tests against the package root itself to keep behavior the same as regular CI in such cases
|
||||
logging.error("'test' folder is not found in {}".format(pkg_root_path))
|
||||
return pkg_root_path
|
||||
return paths[0]
|
||||
|
||||
def _install_packages(self, dependent_pkg_path, pkg_to_exclude):
|
||||
python_executable = self.context.venv.python_executable
|
||||
working_dir = self.context.package_root_path
|
||||
temp_dir = self.context.temp_path
|
||||
|
||||
list_to_exclude = [pkg_to_exclude,]
|
||||
installed_pkgs = [p.split('==')[0] for p in list(freeze.freeze(paths=self.context.venv.lib_paths)) if p.startswith('azure-')]
|
||||
logging.info("Installed azure sdk packages:{}".format(installed_pkgs))
|
||||
list_to_exclude.extend(installed_pkgs)
|
||||
# install dev requirements, but skip the package under test and any package that is already installed
|
||||
filtered_dev_req_path = filter_dev_requirements(
|
||||
dependent_pkg_path, list_to_exclude, dependent_pkg_path
|
||||
)
|
||||
|
||||
if filtered_dev_req_path:
|
||||
logging.info(
|
||||
"Installing filtered dev requirements from {}".format(filtered_dev_req_path)
|
||||
)
|
||||
run_check_call(
|
||||
[python_executable, "-m", "pip", "install", "-r", filtered_dev_req_path],
|
||||
dependent_pkg_path,
|
||||
)
|
||||
else:
|
||||
logging.info("dev requirements is not found to install")
|
||||
|
||||
# install dependent package which is being verified
|
||||
run_check_call(
|
||||
[python_executable, "-m", "pip", "install", dependent_pkg_path], temp_dir
|
||||
)
|
||||
|
||||
def _is_package_installed(self, package, version):
|
||||
# find env root and package locations
|
||||
venv_root = self.context.venv.path
|
||||
site_packages = self.context.venv.lib_paths
|
||||
logging.info("Searching for packages in :{}".format(site_packages))
|
||||
installed_pkgs = list(freeze.freeze(paths=site_packages))
|
||||
logging.info("Installed packages: {}".format(installed_pkgs))
|
||||
# Verify installed package version
|
||||
# Search for exact version or dev build version of current version.
|
||||
pkg_search_string = "{0}=={1}".format(package, version)
|
||||
dev_build_search_string = "{0}=={1}.dev".format(package, version)
|
||||
return any(p == pkg_search_string or p.startswith(dev_build_search_string) for p in installed_pkgs)
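# Illustrative example (hypothetical package): for package "azure-core" at version "1.2.0" this matches
# either "azure-core==1.2.0" exactly or a dev build such as "azure-core==1.2.0.dev20200101".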
|
||||
|
||||
|
||||
# This method identifies package dependency map for all packages in azure sdk
|
||||
def find_package_dependency(glob_string, repo_root_dir):
|
||||
package_paths = process_glob_string(
|
||||
glob_string, repo_root_dir, "", "Regression"
|
||||
)
|
||||
dependency_map = {}
|
||||
for pkg_root in package_paths:
|
||||
_, _, _, requires = parse_setup(pkg_root)
|
||||
|
||||
# Get a list of package names from install requires
|
||||
required_pkgs = [parse_require(r)[0] for r in requires]
|
||||
required_pkgs = [p for p in required_pkgs if p.startswith("azure")]
|
||||
|
||||
for req_pkg in required_pkgs:
|
||||
if req_pkg not in dependency_map:
|
||||
dependency_map[req_pkg] = []
|
||||
dependency_map[req_pkg].append(pkg_root)
|
||||
|
||||
logging.info("Package dependency: {}".format(dependency_map))
|
||||
return dependency_map
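# Illustrative shape of the returned map (paths are hypothetical):
# {"azure-core": ["<repo>/sdk/storage/azure-storage-blob", "<repo>/sdk/identity/azure-identity"]}
# i.e. each required azure package maps to the roots of the packages that list it in install_requires.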
|
||||
|
||||
|
||||
# This is the main function which identifies packages to test, find dependency matrix and trigger test
|
||||
def run_main(args):
|
||||
|
||||
temp_dir = ""
|
||||
if args.temp_dir:
|
||||
temp_dir = args.temp_dir
|
||||
else:
|
||||
temp_dir = os.path.abspath(os.path.join(root_dir, "..", TEMP_FOLDER_NAME))
|
||||
|
||||
code_repo_root = os.path.join(temp_dir, GIT_REPO_NAME)
|
||||
# Make sure the root_dir this script runs from is not the same as the code repo, which will be checked out to an old released branch to run tests
|
||||
if root_dir == code_repo_root:
|
||||
logging.error(
|
||||
"Invalid path to clone github code repo. Temporary path can not be same as current source root directory"
|
||||
)
|
||||
exit(1)
|
||||
|
||||
# Make sure temp path exists
|
||||
if not os.path.exists(temp_dir):
|
||||
os.mkdir(temp_dir)
|
||||
|
||||
if args.service:
|
||||
service_dir = os.path.join("sdk", args.service)
|
||||
target_dir = os.path.join(root_dir, service_dir)
|
||||
else:
|
||||
target_dir = root_dir
|
||||
|
||||
targeted_packages = process_glob_string(args.glob_string, target_dir, "", "Regression")
|
||||
if len(targeted_packages) == 0:
|
||||
exit(0)
|
||||
|
||||
# clone code repo only if it doesn't exist
|
||||
if not os.path.exists(code_repo_root):
|
||||
clone_repo(temp_dir, AZURE_SDK_FOR_PYTHON_GIT_URL)
|
||||
else:
|
||||
logging.info(
|
||||
"Path {} already exists. Skipping step to clone github repo".format(
|
||||
code_repo_root
|
||||
)
|
||||
)
|
||||
|
||||
# find package dependency map for azure sdk
|
||||
pkg_dependency = find_package_dependency(AZURE_GLOB_STRING, code_repo_root)
|
||||
|
||||
# Create regression test context. One context object will be reused for all packages
|
||||
context = RegressionContext(
|
||||
args.whl_dir, temp_dir, args.verify_latest, args.mark_arg
|
||||
)
|
||||
|
||||
for pkg_path in targeted_packages:
|
||||
context.init_for_pkg(pkg_path)
|
||||
RegressionTest(context, pkg_dependency).run()
|
||||
logging.info("Regression test is completed successfully")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Run regression test for a package against released dependent packages"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"glob_string",
|
||||
nargs="?",
|
||||
help=(
|
||||
"A comma separated list of glob strings that will target the top level directories that contain packages."
|
||||
'Examples: All = "azure*", Single = "azure-keyvault", Targeted Multiple = "azure-keyvault,azure-mgmt-resource"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--service",
|
||||
help=(
|
||||
"Name of service directory (under sdk/) to test."
|
||||
"Example: --service applicationinsights"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--whl-dir",
|
||||
required=True,
|
||||
help=("Directory in which whl is pre built for all eligible package"),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--verify-latest",
|
||||
default=True,
|
||||
help=(
|
||||
"Set this parameter to true to verify regression against latest released version."
|
||||
"Default behavior is to test regression for oldest released version of dependent packages"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--temp-dir",
|
||||
help=(
|
||||
"Temporary path to clone github repo of azure-sdk-for-python to run tests. Any changes in this path will be overwritten"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--mark-arg",
|
||||
dest="mark_arg",
|
||||
help=(
|
||||
'The complete argument for `pytest -m "<input>"`. This can be used to exclude or include specific pytest markers.'
|
||||
'--mark_arg="not cosmosEmulator"'
|
||||
),
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
run_main(args)
|
|
@@ -0,0 +1,389 @@
|
|||
import sys
|
||||
import os
|
||||
import errno
|
||||
import shutil
|
||||
import re
|
||||
import multiprocessing
|
||||
|
||||
if sys.version_info < (3, 0):
|
||||
from Queue import Queue
|
||||
else:
|
||||
from queue import Queue
|
||||
from threading import Thread
|
||||
|
||||
from subprocess import Popen, PIPE, STDOUT
|
||||
from common_tasks import (
|
||||
process_glob_string,
|
||||
run_check_call,
|
||||
cleanup_folder,
|
||||
clean_coverage,
|
||||
log_file,
|
||||
read_file,
|
||||
is_error_code_5_allowed,
|
||||
create_code_coverage_params,
|
||||
find_whl,
|
||||
parse_setup
|
||||
)
|
||||
|
||||
from pkg_resources import parse_requirements, RequirementParseError
|
||||
import logging
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
|
||||
coverage_dir = os.path.join(root_dir, "_coverage/")
|
||||
pool_size = multiprocessing.cpu_count() * 2
|
||||
DEFAULT_TOX_INI_LOCATION = os.path.join(root_dir, "eng/tox/tox.ini")
|
||||
IGNORED_TOX_INIS = ["azure-cosmos"]
|
||||
|
||||
|
||||
class ToxWorkItem:
|
||||
def __init__(self, target_package_path, tox_env, options_array):
|
||||
self.target_package_path = target_package_path
|
||||
self.tox_env = tox_env
|
||||
self.options_array = options_array
|
||||
|
||||
|
||||
class Worker(Thread):
|
||||
def __init__(self, tasks):
|
||||
Thread.__init__(self)
|
||||
self.tasks = tasks
|
||||
self.daemon = True
|
||||
self.start()
|
||||
|
||||
def run(self):
|
||||
while True:
|
||||
func, args, kargs = self.tasks.get()
|
||||
try:
|
||||
func(*args, **kargs)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
finally:
|
||||
self.tasks.task_done()
|
||||
|
||||
|
||||
def in_ci():
|
||||
return os.getenv("TF_BUILD", False)
|
||||
|
||||
|
||||
class ThreadPool:
|
||||
def __init__(self, num_threads):
|
||||
self.tasks = Queue(num_threads)
|
||||
for _ in range(num_threads):
|
||||
Worker(self.tasks)
|
||||
|
||||
def add_task(self, func, *args, **kargs):
|
||||
self.tasks.put((func, args, kargs))
|
||||
|
||||
def map(self, func, args_list):
|
||||
for args in args_list:
|
||||
self.add_task(func, args)
|
||||
|
||||
def wait_completion(self):
|
||||
self.tasks.join()
|
||||
|
||||
|
||||
def combine_coverage_files(targeted_packages):
|
||||
# find tox.ini file. tox.ini is used to combine coverage paths to generate formatted report
|
||||
tox_ini_file = os.path.join(root_dir, "eng", "tox", "tox.ini")
|
||||
config_file_flag = "--rcfile={}".format(tox_ini_file)
|
||||
|
||||
if os.path.isfile(tox_ini_file):
|
||||
# for every individual coverage file, run coverage combine to combine path
|
||||
for package_dir in [package for package in targeted_packages]:
|
||||
coverage_file = os.path.join(package_dir, ".coverage")
|
||||
if os.path.isfile(coverage_file):
|
||||
cov_cmd_array = [sys.executable, "-m", "coverage", "combine"]
|
||||
# tox.ini file has coverage paths to combine
|
||||
# Pass tox.ini as the coverage config file
|
||||
cov_cmd_array.extend([config_file_flag, coverage_file])
|
||||
run_check_call(cov_cmd_array, package_dir)
|
||||
else:
|
||||
# not a hard error at this point
|
||||
# this combine step is required only for modules whose coverage report paths start with .tox
|
||||
logging.error("tox.ini is not found in path {}".format(root_dir))
|
||||
|
||||
|
||||
def collect_tox_coverage_files(targeted_packages):
|
||||
root_coverage_dir = os.path.join(root_dir, "_coverage/")
|
||||
|
||||
clean_coverage(coverage_dir)
|
||||
|
||||
# coverage report has paths starting .tox and azure
|
||||
# coverage combine fixes this with the help of tox.ini[coverage:paths]
|
||||
combine_coverage_files(targeted_packages)
|
||||
|
||||
coverage_files = []
|
||||
# generate coverage files
|
||||
for package_dir in [package for package in targeted_packages]:
|
||||
coverage_file = os.path.join(package_dir, ".coverage")
|
||||
if os.path.isfile(coverage_file):
|
||||
destination_file = os.path.join(
|
||||
root_coverage_dir, ".coverage_{}".format(os.path.basename(package_dir))
|
||||
)
|
||||
shutil.copyfile(coverage_file, destination_file)
|
||||
coverage_files.append(destination_file)
|
||||
|
||||
logging.info("Visible uncombined .coverage files: {}".format(coverage_files))
|
||||
|
||||
if len(coverage_files):
|
||||
cov_cmd_array = [sys.executable, "-m", "coverage", "combine"]
|
||||
cov_cmd_array.extend(coverage_files)
|
||||
|
||||
# merge them with coverage combine and copy to root
|
||||
run_check_call(cov_cmd_array, os.path.join(root_dir, "_coverage/"))
|
||||
|
||||
source = os.path.join(coverage_dir, "./.coverage")
|
||||
dest = os.path.join(root_dir, ".coverage")
|
||||
|
||||
shutil.move(source, dest)
|
||||
|
||||
|
||||
def individual_workload(tox_command_tuple, workload_results):
|
||||
pkg = os.path.basename(tox_command_tuple[1])
|
||||
stdout = os.path.join(tox_command_tuple[1], "stdout.txt")
|
||||
stderr = os.path.join(tox_command_tuple[1], "stderr.txt")
|
||||
tox_dir = os.path.join(tox_command_tuple[1], "./.tox/")
|
||||
|
||||
with open(stdout, "w") as f_stdout, open(stderr, "w") as f_stderr:
|
||||
proc = Popen(
|
||||
tox_command_tuple[0],
|
||||
stdout=f_stdout,
|
||||
stderr=f_stderr,
|
||||
cwd=tox_command_tuple[1],
|
||||
env=os.environ.copy(),
|
||||
)
|
||||
|
||||
logging.info("POpened task for for {}".format(pkg))
|
||||
proc.wait()
|
||||
|
||||
return_code = proc.returncode
|
||||
|
||||
if proc.returncode != 0:
|
||||
logging.error("{} returned with code {}".format(pkg, proc.returncode))
|
||||
else:
|
||||
logging.info(
|
||||
"{} returned with code 0, output will be printed after the test run completes.".format(
|
||||
pkg
|
||||
)
|
||||
)
|
||||
|
||||
if read_file(stderr):
|
||||
logging.error("Package {} had stderror output. Logging.".format(pkg))
|
||||
return_code = "StdErr output detected"
|
||||
|
||||
workload_results[tox_command_tuple[1]] = (return_code, stdout, stderr)
|
||||
|
||||
if in_ci():
|
||||
shutil.rmtree(tox_dir)
|
||||
|
||||
|
||||
def execute_tox_parallel(tox_command_tuples):
|
||||
pool = ThreadPool(pool_size)
|
||||
workload_results = {}
|
||||
run_result = 0
|
||||
|
||||
for index, cmd_tuple in enumerate(tox_command_tuples):
|
||||
pool.add_task(individual_workload, cmd_tuple, workload_results)
|
||||
|
||||
pool.wait_completion()
|
||||
|
||||
for key in workload_results.keys():
|
||||
log_file(workload_results[key][1])
|
||||
|
||||
if workload_results[key][0] != 0:
|
||||
logging.error(
|
||||
"{} tox invocation exited with returncode {}".format(
|
||||
os.path.basename(key), workload_results[key][0]
|
||||
)
|
||||
)
|
||||
run_result = 1
|
||||
|
||||
return run_result
|
||||
|
||||
|
||||
def compare_req_to_injected_reqs(parsed_req, injected_packages):
|
||||
if parsed_req is None:
|
||||
return False
|
||||
|
||||
return any(parsed_req.name in req for req in injected_packages)
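# Illustrative example (hypothetical values): parsed_req.name == "azure-core" with
# injected_packages == ["azure-core==1.9.0"] returns True, so the original requirement line
# is dropped in favor of the injected one.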
|
||||
|
||||
|
||||
def inject_custom_reqs(file, injected_packages, package_dir):
|
||||
req_lines = []
|
||||
injected_packages = [p for p in re.split("[\s,]", injected_packages) if p]
|
||||
|
||||
if injected_packages:
|
||||
logging.info(
|
||||
"Adding custom packages to requirements for {}".format(package_dir)
|
||||
)
|
||||
with open(file, "r") as f:
|
||||
for line in f:
|
||||
try:
|
||||
parsed_req = [req for req in parse_requirements(line)]
|
||||
except RequirementParseError as e:
|
||||
parsed_req = [None]
|
||||
req_lines.append((line, parsed_req))
|
||||
|
||||
if req_lines:
|
||||
all_adjustments = injected_packages + [
|
||||
line_tuple[0].strip()
|
||||
for line_tuple in req_lines
|
||||
if line_tuple[0].strip()
|
||||
and not compare_req_to_injected_reqs(
|
||||
line_tuple[1][0], injected_packages
|
||||
)
|
||||
]
|
||||
else:
|
||||
all_adjustments = injected_packages
|
||||
|
||||
with open(file, "w") as f:
|
||||
# note that we directly use '\n' here instead of os.linesep because of how f.write() handles newlines:
# when a file is opened in text mode (the default), every "\n" written is translated to "\r\n" on Windows,
# so writing os.linesep ("\r\n") would come out as "\r\r\n". Result: extra line breaks!
|
||||
f.write("\n".join(all_adjustments))
|
||||
|
||||
|
||||
def build_whl_for_req(req, package_path):
|
||||
if ".." in req:
|
||||
# Create temp path if it doesn't exist
|
||||
temp_dir = os.path.join(package_path, ".tmp_whl_dir")
|
||||
if not os.path.exists(temp_dir):
|
||||
os.mkdir(temp_dir)
|
||||
|
||||
req_pkg_path = os.path.abspath(os.path.join(package_path, req.replace("\n", "")))
|
||||
pkg_name, version, _, _ = parse_setup(req_pkg_path)
|
||||
logging.info("Building wheel for package {}".format(pkg_name))
|
||||
run_check_call([sys.executable, "setup.py", "bdist_wheel", "-d", temp_dir], req_pkg_path)
|
||||
|
||||
whl_path = find_whl(pkg_name, version, temp_dir)
|
||||
logging.info("Wheel for package {0} is {1}".format(pkg_name, whl_path))
|
||||
logging.info("Replacing dev requirement. Old requirement:{0}, New requirement:{1}".format(req, whl_path))
|
||||
return whl_path
|
||||
else:
|
||||
return req
|
||||
|
||||
def replace_dev_reqs(file):
|
||||
adjusted_req_lines = []
|
||||
|
||||
with open(file, "r") as f:
|
||||
for line in f:
|
||||
args = [
|
||||
part.strip()
|
||||
for part in line.split()
|
||||
if part and not part.strip() == "-e"
|
||||
]
|
||||
amended_line = " ".join(args)
|
||||
adjusted_req_lines.append(amended_line)
|
||||
|
||||
logging.info("Old dev requirements:{}".format(adjusted_req_lines))
|
||||
adjusted_req_lines = list(map(lambda x: build_whl_for_req(x, os.path.dirname(file)), adjusted_req_lines))
|
||||
logging.info("New dev requirements:{}".format(adjusted_req_lines))
|
||||
|
||||
with open(file, "w") as f:
|
||||
# note that we directly use '\n' here instead of os.linesep because of how f.write() handles newlines:
# when a file is opened in text mode (the default), every "\n" written is translated to "\r\n" on Windows,
# so writing os.linesep ("\r\n") would come out as "\r\r\n". Result: extra line breaks!
|
||||
f.write("\n".join(adjusted_req_lines))
|
||||
|
||||
|
||||
def execute_tox_serial(tox_command_tuples):
|
||||
return_code = 0
|
||||
|
||||
for index, cmd_tuple in enumerate(tox_command_tuples):
|
||||
tox_dir = os.path.abspath(os.path.join(cmd_tuple[1], "./.tox/"))
|
||||
|
||||
logging.info(
|
||||
"Running tox for {}. {} of {}.".format(
|
||||
os.path.basename(cmd_tuple[1]), index + 1, len(tox_command_tuples)
|
||||
)
|
||||
)
|
||||
|
||||
result = run_check_call(cmd_tuple[0], cmd_tuple[1], always_exit=False)
|
||||
|
||||
if result is not None and result != 0:
|
||||
return_code = result
|
||||
|
||||
if in_ci():
|
||||
shutil.rmtree(tox_dir)
|
||||
|
||||
return return_code
|
||||
|
||||
|
||||
def prep_and_run_tox(targeted_packages, parsed_args, options_array=[]):
|
||||
if parsed_args.wheel_dir:
|
||||
os.environ["PREBUILT_WHEEL_DIR"] = parsed_args.wheel_dir
|
||||
|
||||
if parsed_args.mark_arg:
|
||||
options_array.extend(["-m", "{}".format(parsed_args.mark_arg)])
|
||||
|
||||
tox_command_tuples = []
|
||||
|
||||
for index, package_dir in enumerate(targeted_packages):
|
||||
destination_tox_ini = os.path.join(package_dir, "tox.ini")
|
||||
destination_dev_req = os.path.join(package_dir, "dev_requirements.txt")
|
||||
|
||||
tox_execution_array = [sys.executable, "-m", "tox"]
|
||||
|
||||
local_options_array = options_array[:]
|
||||
|
||||
# Get code coverage params for current package
|
||||
package_name = os.path.basename(package_dir)
|
||||
coverage_commands = create_code_coverage_params(parsed_args, package_name)
|
||||
local_options_array.extend(coverage_commands)
|
||||
|
||||
pkg_egg_info_name = "{}.egg-info".format(package_name.replace("-", "_"))
|
||||
local_options_array.extend(["--ignore", pkg_egg_info_name])
|
||||
|
||||
# if we are targeting only packages that are management plane, it is a possibility
|
||||
# that no tests running is an acceptable situation
|
||||
# we explicitly handle this here.
|
||||
if is_error_code_5_allowed(package_dir, package_name):
|
||||
local_options_array.append("--suppress-no-test-exit-code")
|
||||
|
||||
# if not present, re-use base
|
||||
if not os.path.exists(destination_tox_ini) or (
|
||||
os.path.exists(destination_tox_ini)
|
||||
and os.path.basename(package_dir) in IGNORED_TOX_INIS
|
||||
):
|
||||
logging.info(
|
||||
"No customized tox.ini present, using common eng/tox/tox.ini for {}".format(
|
||||
os.path.basename(package_dir)
|
||||
)
|
||||
)
|
||||
tox_execution_array.extend(["-c", DEFAULT_TOX_INI_LOCATION])
|
||||
|
||||
# handle empty file
|
||||
if not os.path.exists(destination_dev_req):
|
||||
logging.info("No dev_requirements present.")
|
||||
with open(destination_dev_req, "w+") as file:
|
||||
file.write("\n")
|
||||
|
||||
if in_ci():
|
||||
replace_dev_reqs(destination_dev_req)
|
||||
os.environ["TOX_PARALLEL_NO_SPINNER"] = "1"
|
||||
|
||||
inject_custom_reqs(
|
||||
destination_dev_req, parsed_args.injected_packages, package_dir
|
||||
)
|
||||
|
||||
if parsed_args.tox_env:
|
||||
tox_execution_array.extend(["-e", parsed_args.tox_env])
|
||||
|
||||
if parsed_args.tenvparallel:
|
||||
tox_execution_array.extend(["-p", "all"])
|
||||
|
||||
if local_options_array:
|
||||
tox_execution_array.extend(["--"] + local_options_array)
|
||||
|
||||
tox_command_tuples.append((tox_execution_array, package_dir))
|
||||
|
||||
if parsed_args.tparallel:
|
||||
return_code = execute_tox_parallel(tox_command_tuples)
|
||||
else:
|
||||
return_code = execute_tox_serial(tox_command_tuples)
|
||||
|
||||
if not parsed_args.disablecov:
|
||||
collect_tox_coverage_files(targeted_packages)
|
||||
|
||||
sys.exit(return_code)
|
|
@@ -0,0 +1,132 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# --------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
# --------------------------------------------------------------------------------------------
|
||||
|
||||
# Normally, this module will be executed as part of the devops build definitions.
|
||||
# An enterprising user can easily glance over this and leverage for their own purposes.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
|
||||
from common_tasks import process_glob_string, parse_setup, run_check_call
|
||||
|
||||
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
root_dir = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "..", ".."))
|
||||
psscript = os.path.join(root_dir, "scripts", "devops_tasks", "find_change_log.ps1")
|
||||
|
||||
# Service Fabric change log has non-standard versioning, e.g. 7.0.0.0
|
||||
# Change log verification should skip this package since this script looks for the standard x.y.z version format
|
||||
NON_STANDARD_CHANGE_LOG_PACKAGES = ["azure-servicefabric",]
|
||||
|
||||
def find_change_log(targeted_package, version):
|
||||
# Execute powershell script to find a matching version in change log
|
||||
command_array = ["pwsh"]
|
||||
command_array.append("-File {}".format(psscript))
|
||||
command_array.append("-workingDir {}".format(targeted_package))
|
||||
command_array.append("-version {}".format(version))
|
||||
command_array.append("set-ExecutionPolicy Unrestricted")
|
||||
|
||||
allowed_return_codes = []
|
||||
|
||||
# Execute powershell script to verify version
|
||||
er_result = run_check_call(
|
||||
command_array, root_dir, allowed_return_codes, True, False
|
||||
)
|
||||
|
||||
if er_result:
|
||||
logging.error(
|
||||
"Failed to find version in change log for package {}".format(
|
||||
targeted_package
|
||||
)
|
||||
)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def verify_packages(targeted_packages):
|
||||
# run the build and distribution
|
||||
change_log_missing = {}
|
||||
|
||||
for package in targeted_packages:
|
||||
# Parse setup.py using common helper method to get version and package name
|
||||
pkg_name, version, _, _ = parse_setup(package)
|
||||
|
||||
# Skip management packages and any explicitly excluded packages
|
||||
if "-mgmt" in pkg_name or pkg_name in NON_STANDARD_CHANGE_LOG_PACKAGES:
|
||||
logging.info("Skipping {} due to known exclusion in change log verification".format(pkg_name))
|
||||
continue
|
||||
|
||||
if not find_change_log(package, version):
|
||||
logging.error(
|
||||
"Change log is not updated for package {0}, version {1}".format(
|
||||
pkg_name, version
|
||||
)
|
||||
)
|
||||
change_log_missing[pkg_name] = version
|
||||
|
||||
return change_log_missing
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Verifies latest version is updated in change log, Called from DevOps YAML Pipeline"
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"glob_string",
|
||||
nargs="?",
|
||||
help=(
|
||||
"A comma separated list of glob strings that will target the top level directories that contain packages. "
|
||||
'Examples: All == "azure-*", Single = "azure-keyvault"'
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--service",
|
||||
help=(
|
||||
"Name of service directory (under sdk/) to build."
|
||||
"Example: --service applicationinsights"
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
"--pkgfilter",
|
||||
default="",
|
||||
dest="package_filter_string",
|
||||
help=(
|
||||
"An additional string used to filter the set of artifacts by a simple CONTAINS clause."
|
||||
),
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# We need to support both CI builds of everything and individual service
|
||||
# folders. This logic allows us to do both.
|
||||
if args.service:
|
||||
service_dir = os.path.join("sdk", args.service)
|
||||
target_dir = os.path.join(root_dir, service_dir)
|
||||
else:
|
||||
target_dir = root_dir
|
||||
|
||||
# Skip nspkg and metapackage from version check.
|
||||
# Change log file may be missing for these two types
|
||||
# the process_glob_string helper filters out nspkg and metapackages when the filter type is "Docs"
|
||||
targeted_packages = process_glob_string(
|
||||
args.glob_string, target_dir, args.package_filter_string, "Docs"
|
||||
)
|
||||
change_missing = verify_packages(targeted_packages)
|
||||
if len(change_missing) > 0:
|
||||
logging.error("Below packages do not have change log")
|
||||
logging.error("***************************************************")
|
||||
for pkg_name in change_missing.keys():
|
||||
logging.error("{0} - {1}".format(pkg_name, change_missing[pkg_name]))
|
||||
|
||||
sys.exit(1)
|
|
@@ -0,0 +1,122 @@
|
|||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_TAG_PREFIX = """### Tag: package-{api_version}-only
|
||||
|
||||
These settings apply only when `--tag=package-{api_version}-only` is specified on the command line.
|
||||
|
||||
```yaml $(tag) == 'package-{api_version}-only'
|
||||
input-file:"""
|
||||
_TAG_SUFFIX = "```\n\n"
|
||||
|
||||
_BATCH_PREFIX = """```yaml $(python) && $(multiapi)
|
||||
batch:"""
|
||||
_BATCH_SUFFIX = "```\n\n"
|
||||
|
||||
_PY_NAMESPACE = """### Tag: package-{api_version}-only and python
|
||||
|
||||
These settings apply only when `--tag=package-{api_version}-only --python` is specified on the command line.
|
||||
Please also specify `--python-sdks-folder=<path to the root directory of your azure-sdk-for-python clone>`.
|
||||
|
||||
``` yaml $(tag) == 'package-{api_version}-only' && $(python)
|
||||
python:
|
||||
namespace: $(python-base-namespace).{ns}
|
||||
output-folder: $(python-sdks-folder)/$(python-base-folder)/{ns}
|
||||
```
|
||||
"""
|
||||
|
||||
def get_api_versions(root):
|
||||
|
||||
api_versions = {}
|
||||
prefixes_per_path = {}
|
||||
|
||||
rp_folders = root.glob("Microsoft.*")
|
||||
for rp_folder in rp_folders:
|
||||
_LOGGER.info(f"Parsing folder {rp_folder}")
|
||||
for preview_stable in rp_folder.iterdir():
|
||||
_LOGGER.info(f"Currently in {preview_stable}")
|
||||
for api_version in preview_stable.iterdir():
|
||||
_LOGGER.info(f"Currently in {api_version}")
|
||||
for swagger in api_version.glob("*.json"):
|
||||
prefixes_per_path[swagger] = parse_swagger(swagger)
|
||||
api_versions.setdefault(api_version.name, []).append(swagger.relative_to(root).as_posix())
|
||||
|
||||
# Try to detect when it's problematic. That's tough, the following logic is definitely
|
||||
# not handling all the tough parts yet...
|
||||
|
||||
# 1 - If a file declares several prefixes, log a warning
|
||||
for swagger_path, prefixed_used in prefixes_per_path.items():
|
||||
if len(prefixed_used) == 1:
|
||||
_LOGGER.info(f"File {swagger_path} uses only one prefix: {prefixed_used}")
|
||||
else:
|
||||
_LOGGER.warn(f"File {swagger_path} uses several prefixes: {prefixed_used}")
|
||||
|
||||
|
||||
# Let's print
|
||||
print_tags(api_versions)
|
||||
print_batch(api_versions)
|
||||
print_python_namespace(api_versions)
|
||||
|
||||
def print_tags(api_versions):
|
||||
for api_version in sorted(api_versions.keys(), reverse=True):
|
||||
swagger_files = api_versions[api_version]
|
||||
print(_TAG_PREFIX.format(api_version=api_version))
|
||||
for swagger_file in swagger_files:
|
||||
print("- {}".format(swagger_file))
|
||||
print(_TAG_SUFFIX)
|
||||
|
||||
|
||||
def print_batch(api_versions):
|
||||
print(_BATCH_PREFIX)
|
||||
for api_version in sorted(api_versions.keys(), reverse=True):
|
||||
print(f" - tag: package-{api_version}-only")
|
||||
print(_BATCH_SUFFIX)
|
||||
|
||||
def print_python_namespace(api_versions):
|
||||
for api_version in sorted(api_versions.keys(), reverse=True):
|
||||
swagger_files = api_versions[api_version]
|
||||
print(_PY_NAMESPACE.format(
|
||||
api_version=api_version,
|
||||
ns="v"+api_version.replace("-", "_"))
|
||||
)
|
||||
|
||||
def parse_swagger(swagger_path):
|
||||
_LOGGER.info(f"Parsing {swagger_path}")
|
||||
with swagger_path.open() as swagger:
|
||||
parsed_swagger = json.load(swagger)
|
||||
|
||||
api_version = parsed_swagger["info"]["version"]
|
||||
|
||||
operations = operation_finder(parsed_swagger)
|
||||
|
||||
prefixed_used = {op.split("_")[0] for op in operations if "_" in op}
|
||||
return prefixed_used
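# Illustrative example (hypothetical operationId): "VirtualMachines_Get" yields the prefix
# "VirtualMachines", so the returned set contains one entry per operation group used in the file.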
|
||||
|
||||
def operation_finder(swagger_root):
|
||||
result = set()
|
||||
for key, node in swagger_root.items():
|
||||
if key == "definitions": # Skipping some node
|
||||
return result
|
||||
if key == "operationId":
|
||||
result.add(node)
|
||||
# Can skip it now, only one operationId per node
|
||||
return result
|
||||
if isinstance(node, dict):
|
||||
result |= operation_finder(node)
|
||||
return result
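# Minimal illustrative input (hypothetical): {"paths": {"/vms": {"get": {"operationId": "VirtualMachines_Get"}}}}
# returns {"VirtualMachines_Get"}; once a "definitions" key is encountered at a given level, the walk of that dict stops.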
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.DEBUG if "--debug" in sys.argv else logging.WARNING)
|
||||
|
||||
root = Path(__file__).parent
|
||||
|
||||
root = Path(sys.argv[1]).relative_to(root)
|
||||
|
||||
_LOGGER.info(f"My root: {root}")
|
||||
get_api_versions(root)
|
|
@@ -0,0 +1,545 @@
|
|||
import argparse
|
||||
import ast
|
||||
import importlib
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import re
|
||||
import sys
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from typing import List, Tuple, Any, Union
|
||||
|
||||
try:
|
||||
import msrestazure
|
||||
except: # Install msrestazure. Would be best to mock it, since we don't need it, but all scenarios I know are fine with a pip install for now
|
||||
import subprocess
|
||||
|
||||
subprocess.call(
|
||||
sys.executable + " -m pip install msrestazure --index-url https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/", shell=True
|
||||
) # Use shell to use venv if available
|
||||
|
||||
try:
|
||||
from jinja2 import Template, FileSystemLoader, Environment
|
||||
except:
|
||||
import subprocess
|
||||
|
||||
subprocess.call(
|
||||
sys.executable + " -m pip install jinja2 --index-url https://pkgs.dev.azure.com/azure-sdk/public/_packaging/azure-sdk-for-python/pypi/simple/", shell=True
|
||||
) # Use shell to use venv if available
|
||||
from jinja2 import Template, FileSystemLoader, Environment
|
||||
|
||||
|
||||
try:
|
||||
import azure.common
|
||||
except:
|
||||
sys.path.append(
|
||||
str((Path(__file__).parents[1] / "sdk" / "core" / "azure-common").resolve())
|
||||
)
|
||||
import azure.common
|
||||
|
||||
import pkg_resources
|
||||
|
||||
pkg_resources.declare_namespace("azure")
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def parse_input(input_parameter):
|
||||
"""From a syntax like package_name#submodule, build a package name
|
||||
and complete module name.
|
||||
"""
|
||||
split_package_name = input_parameter.split("#")
|
||||
package_name = split_package_name[0]
|
||||
module_name = package_name.replace("-", ".")
|
||||
if len(split_package_name) >= 2:
|
||||
module_name = ".".join([module_name, split_package_name[1]])
|
||||
return package_name, module_name
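# Illustrative example: parse_input("azure-mgmt-resource#features") returns
# ("azure-mgmt-resource", "azure.mgmt.resource.features"); without "#" the module name is just
# the package name with "-" replaced by ".".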
|
||||
|
||||
|
||||
# given a package name, return the path of the package directory relative to the sdk_root
|
||||
def resolve_package_directory(package_name, sdk_root=None):
|
||||
packages = [
|
||||
p.parent
|
||||
for p in (
|
||||
list(sdk_root.glob("{}/setup.py".format(package_name)))
|
||||
+ list(sdk_root.glob("sdk/*/{}/setup.py".format(package_name)))
|
||||
)
|
||||
]
|
||||
|
||||
if len(packages) > 1:
|
||||
print(
|
||||
"There should only be a single package matched in either repository structure. The following were found: {}".format(
|
||||
packages
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
return str(packages[0].relative_to(sdk_root))
|
||||
|
||||
|
||||
def get_versioned_modules(
|
||||
package_name: str, module_name: str, sdk_root: Path = None
|
||||
) -> List[Tuple[str, Any]]:
|
||||
"""Get (label, submodule) where label starts with "v20" and submodule is the corresponding imported module.
|
||||
"""
|
||||
if not sdk_root:
|
||||
sdk_root = Path(__file__).parents[1]
|
||||
|
||||
path_to_package = resolve_package_directory(package_name, sdk_root)
|
||||
azure.__path__.append(str((sdk_root / path_to_package / "azure").resolve()))
|
||||
|
||||
# Doesn't work with namespace package
|
||||
# sys.path.append(str((sdk_root / package_name).resolve()))
|
||||
module_to_generate = importlib.import_module(module_name)
|
||||
return {
|
||||
label: importlib.import_module("." + label, module_to_generate.__name__)
|
||||
for (_, label, ispkg) in pkgutil.iter_modules(module_to_generate.__path__)
|
||||
if label.startswith("v20") and ispkg
|
||||
}
|
||||
|
||||
|
||||
class ApiVersionExtractor(ast.NodeVisitor):
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.api_version = None
|
||||
super(ApiVersionExtractor, self).__init__(*args, **kwargs)
|
||||
|
||||
def visit_Assign(self, node):
|
||||
try:
|
||||
if node.targets[0].id == "api_version":
|
||||
self.api_version = node.value.s
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
def extract_api_version_from_code(function):
|
||||
"""Will extract from __code__ the API version. Should be use if you use this is an operation group with no constant api_version.
|
||||
"""
|
||||
try:
|
||||
srccode = inspect.getsource(function)
|
||||
try:
|
||||
ast_tree = ast.parse(srccode)
|
||||
except IndentationError:
|
||||
ast_tree = ast.parse("with 0:\n" + srccode)
|
||||
|
||||
api_version_visitor = ApiVersionExtractor()
|
||||
api_version_visitor.visit(ast_tree)
|
||||
return api_version_visitor.api_version
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
|
||||
def get_client_class_name_from_module(module):
|
||||
"""Being a module that is an Autorest generation, get the client name."""
|
||||
# Using the fact that Client is always the first element in __all__
|
||||
# I externalize that code in a class in case we need to be smarter later
|
||||
return module.__all__[0]
|
||||
|
||||
|
||||
def build_operation_meta(versioned_modules):
|
||||
"""Introspect the client:
|
||||
|
||||
version_dict => {
|
||||
'application_gateways': [
|
||||
('v2018_05_01', 'ApplicationGatewaysOperations')
|
||||
]
|
||||
}
|
||||
mod_to_api_version => {'v2018_05_01': '2018-05-01'}
|
||||
"""
|
||||
|
||||
version_dict = {}
|
||||
mod_to_api_version = {}
|
||||
for versionned_label, versionned_mod in versioned_modules.items():
|
||||
extracted_api_versions = set()
|
||||
client_doc = versionned_mod.__dict__[
|
||||
get_client_class_name_from_module(versionned_mod)
|
||||
].__doc__
|
||||
operations = list(
|
||||
re.finditer(
|
||||
r":ivar (?P<attr>[a-z_0-9]+): \w+ operations\n\s+:vartype (?P=attr): .*.operations.(?P<clsname>\w+)\n",
|
||||
client_doc,
|
||||
)
|
||||
)
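# Illustrative docstring lines this regex matches (operation group name is hypothetical):
#   ":ivar virtual_machines: VirtualMachines operations"
#   ":vartype virtual_machines: azure.mgmt.compute.v2019_07_01.operations.VirtualMachinesOperations"
# capturing attr="virtual_machines" and clsname="VirtualMachinesOperations".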
|
||||
for operation in operations:
|
||||
attr, clsname = operation.groups()
|
||||
_LOGGER.debug("Class name: %s", clsname)
|
||||
version_dict.setdefault(attr, []).append((versionned_label, clsname))
|
||||
|
||||
# Create a fake operation group to extract easily the real api version
|
||||
extracted_api_version = None
|
||||
try:
|
||||
extracted_api_version = versionned_mod.operations.__dict__[clsname](
|
||||
None, None, None, None
|
||||
).api_version
|
||||
_LOGGER.debug("Found an obvious API version: %s", extracted_api_version)
|
||||
if extracted_api_version:
|
||||
extracted_api_versions.add(extracted_api_version)
|
||||
except Exception:
|
||||
_LOGGER.debug(
|
||||
"Should not happen. I guess it mixed operation groups like VMSS Network..."
|
||||
)
|
||||
for func_name, function in versionned_mod.operations.__dict__[
|
||||
clsname
|
||||
].__dict__.items():
|
||||
if not func_name.startswith("__"):
|
||||
_LOGGER.debug("Try to extract API version from: %s", func_name)
|
||||
extracted_api_version = extract_api_version_from_code(function)
|
||||
_LOGGER.debug(
|
||||
"Extracted API version: %s", extracted_api_version
|
||||
)
|
||||
if extracted_api_version:
|
||||
extracted_api_versions.add(extracted_api_version)
|
||||
|
||||
if not extracted_api_versions:
|
||||
sys.exit(
|
||||
"Was not able to extract api_version of {}".format(versionned_label)
|
||||
)
|
||||
if len(extracted_api_versions) >= 2:
|
||||
# Mixed operation group, try to figure out what we want to use
|
||||
final_api_version = None
|
||||
_LOGGER.warning(
|
||||
"Found too much API version: {} in label {}".format(
|
||||
extracted_api_versions, versionned_label
|
||||
)
|
||||
)
|
||||
for candidate_api_version in extracted_api_versions:
|
||||
if (
|
||||
"v{}".format(candidate_api_version.replace("-", "_"))
|
||||
== versionned_label
|
||||
):
|
||||
final_api_version = candidate_api_version
|
||||
_LOGGER.warning(
|
||||
"Guessing you want {} based on label {}".format(
|
||||
final_api_version, versionned_label
|
||||
)
|
||||
)
|
||||
break
|
||||
else:
|
||||
sys.exit(
|
||||
"Unble to match {} to label {}".format(
|
||||
extracted_api_versions, versionned_label
|
||||
)
|
||||
)
|
||||
extracted_api_versions = {final_api_version}
|
||||
mod_to_api_version[versionned_label] = extracted_api_versions.pop()
|
||||
|
||||
# latest: api_version=mod_to_api_version[versions[-1][0]]
|
||||
|
||||
return version_dict, mod_to_api_version
|
||||
|
||||
|
||||
def build_operation_mixin_meta(versioned_modules):
|
||||
"""Introspect the client:
|
||||
|
||||
version_dict => {
|
||||
'check_dns_name_availability': {
|
||||
'doc': 'docstring',
|
||||
'signature': '(self, p1, p2, **operation_config),
|
||||
'call': 'p1, p2',
|
||||
'available_apis': [
|
||||
'v2018_05_01'
|
||||
]
|
||||
}
|
||||
}
|
||||
"""
|
||||
mixin_operations = {}
|
||||
|
||||
for versionned_label, versionned_mod in sorted(versioned_modules.items()):
|
||||
|
||||
client_name = get_client_class_name_from_module(versionned_mod)
|
||||
client_class = versionned_mod.__dict__[client_name]
|
||||
|
||||
# Detect if this client is using an operation mixin (Network)
|
||||
# Operation mixins are available since Autorest.Python 4.x
|
||||
operations_mixin = next(
|
||||
(c for c in client_class.__mro__ if "OperationsMixin" in c.__name__), None
|
||||
)
|
||||
if not operations_mixin:
|
||||
continue
|
||||
|
||||
for func_name, func in operations_mixin.__dict__.items():
|
||||
# Work only on functions
|
||||
if func_name.startswith("_"):
|
||||
continue
|
||||
|
||||
signature = inspect.signature(func)
|
||||
mixin_operations.setdefault(func_name, {}).setdefault(
|
||||
"available_apis", []
|
||||
).append(versionned_label)
|
||||
mixin_operations[func_name]["doc"] = func.__doc__
|
||||
mixin_operations[func_name]["signature"] = str(signature)
|
||||
mixin_operations[func_name]["call"] = ", ".join(
|
||||
list(signature.parameters)[1:-1]
|
||||
)
|
||||
|
||||
return mixin_operations
|
||||
|
||||
|
||||
def build_last_rt_list(
|
||||
versioned_operations_dict, mixin_operations, last_api_version, preview_mode
|
||||
):
|
||||
"""Build the a mapping RT => API version if RT doesn't exist in latest detected API version.
|
||||
|
||||
Example:
|
||||
last_rt_list = {
|
||||
'check_dns_name_availability': '2018-05-01'
|
||||
}
|
||||
|
||||
There is one subtle scenario if PREVIEW mode is disabled:
|
||||
- RT1 available on 2019-05-01 and 2019-06-01-preview
|
||||
- RT2 available on 2019-06-01-preview
|
||||
- RT3 available on 2019-07-01-preview
|
||||
|
||||
Then, if I put "RT2: 2019-06-01-preview" in the list, this means I have to make
|
||||
"2019-06-01-preview" the default for models loading (otherwise "RT2: 2019-06-01-preview" won't work).
|
||||
But this likely breaks RT1 default operations at "2019-05-01", with default models at "2019-06-01-preview"
|
||||
since "models" are shared for the entire set of operations groups (I wished models would be split by operation groups, but meh, that's not the case)
|
||||
|
||||
So, until we have a smarter Autorest to deal with that, only preview RTs which do not share models with a stable RT can be added to this map.
|
||||
In this case, RT2 is out, RT3 is in.
|
||||
"""
|
||||
|
||||
def there_is_a_rt_that_contains_api_version(rt_dict, api_version):
|
||||
"Test in the given api_version is is one of those RT."
|
||||
for rt_api_version in rt_dict.values():
|
||||
if api_version in rt_api_version:
|
||||
return True
|
||||
return False
|
||||
|
||||
last_rt_list = {}
|
||||
# Operation groups
|
||||
versioned_dict = {
|
||||
operation_name: [meta[0] for meta in operation_metadata]
|
||||
for operation_name, operation_metadata in versioned_operations_dict.items()
|
||||
}
|
||||
# Operations at client level
|
||||
versioned_dict.update(
|
||||
{
|
||||
operation_name: operation_metadata["available_apis"]
|
||||
for operation_name, operation_metadata in mixin_operations.items()
|
||||
}
|
||||
)
|
||||
|
||||
for operation, api_versions_list in versioned_dict.items():
|
||||
local_last_api_version = get_floating_latest(api_versions_list, preview_mode)
|
||||
if local_last_api_version == last_api_version:
|
||||
continue
|
||||
# If some other RT contains "local_last_api_version" and it's greater than the future default, it's dangerous, don't profile it
|
||||
if (
|
||||
there_is_a_rt_that_contains_api_version(
|
||||
versioned_dict, local_last_api_version
|
||||
)
|
||||
and local_last_api_version > last_api_version
|
||||
):
|
||||
continue
|
||||
last_rt_list[operation] = local_last_api_version
|
||||
|
||||
return last_rt_list
|
||||
|
||||
|
||||
def get_floating_latest(api_versions_list, preview_mode):
|
||||
"""Get the floating latest, from a random list of API versions.
|
||||
"""
|
||||
api_versions_list = list(api_versions_list)
|
||||
absolute_latest = sorted(api_versions_list)[-1]
|
||||
trimmed_preview = [
|
||||
version for version in api_versions_list if "preview" not in version
|
||||
]
|
||||
|
||||
# If there is no preview, easy: the absolute latest is the only latest
|
||||
if not trimmed_preview:
|
||||
return absolute_latest
|
||||
|
||||
# If preview mode, let's use the absolute latest, I don't care preview or stable
|
||||
if preview_mode:
|
||||
return absolute_latest
|
||||
|
||||
# If not preview mode, and there is preview, take the latest known stable
|
||||
return sorted(trimmed_preview)[-1]
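# Illustrative example: with ["v2019_05_01", "v2019_06_01_preview"] this returns
# "v2019_05_01" when preview_mode is False and "v2019_06_01_preview" when preview_mode is True.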
|
||||
|
||||
|
||||
def find_module_folder(package_name, module_name):
|
||||
sdk_root = Path(__file__).parents[1]
|
||||
_LOGGER.debug("SDK root is: %s", sdk_root)
|
||||
path_to_package = resolve_package_directory(package_name, sdk_root)
|
||||
module_path = (
|
||||
sdk_root / Path(path_to_package) / Path(module_name.replace(".", os.sep))
|
||||
)
|
||||
_LOGGER.debug("Module path is: %s", module_path)
|
||||
return module_path
|
||||
|
||||
|
||||
def find_client_file(package_name, module_name):
|
||||
module_path = find_module_folder(package_name, module_name)
|
||||
return next(module_path.glob("*_client.py"))
|
||||
|
||||
|
||||
def patch_import(file_path: Union[str, Path]) -> None:
|
||||
"""If multi-client package, we need to patch import to be
|
||||
from ..version
|
||||
and not
|
||||
from .version
|
||||
|
||||
That should probably mean those files should become a template, but since right now
|
||||
it's literally one dot, let's do it the raw way.
|
||||
"""
|
||||
# That's a dirty hack, maybe it's worth making configuration a template?
|
||||
with open(file_path, "rb") as read_fd:
|
||||
conf_bytes = read_fd.read()
|
||||
conf_bytes = conf_bytes.replace(
|
||||
b" .version", b" ..version"
|
||||
) # Just a dot right? Worth its own template for that? :)
|
||||
with open(file_path, "wb") as write_fd:
|
||||
write_fd.write(conf_bytes)
|
||||
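# For instance (illustrative only), a line like "from .version import VERSION" in the
# copied _configuration.py would become "from ..version import VERSION" after
# patch_import runs on it.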
|
||||
|
||||
def has_subscription_id(client_class):
|
||||
return "subscription_id" in inspect.signature(client_class).parameters
|
||||
|
||||
|
||||
def main(input_str, default_api=None):
|
||||
|
||||
# If True, means the auto-profile will consider preview versions.
|
||||
# If not, and a stable API version exists for the global default or a given RT, it will always be used
|
||||
preview_mode = default_api and "preview" in default_api
|
||||
|
||||
# The only known multi-client package right now is azure-mgmt-resource
|
||||
is_multi_client_package = "#" in input_str
|
||||
|
||||
package_name, module_name = parse_input(input_str)
|
||||
versioned_modules = get_versioned_modules(package_name, module_name)
|
||||
versioned_operations_dict, mod_to_api_version = build_operation_meta(
|
||||
versioned_modules
|
||||
)
|
||||
|
||||
client_folder = find_module_folder(package_name, module_name)
|
||||
last_api_version = get_floating_latest(mod_to_api_version.keys(), preview_mode)
|
||||
|
||||
# I need default_api to be v2019_06_07_preview shaped if it exists, let's be smart
|
||||
# and change it automatically so I can take both syntax as input
|
||||
if default_api and not default_api.startswith("v"):
|
||||
last_api_version = [
|
||||
mod_api
|
||||
for mod_api, real_api in mod_to_api_version.items()
|
||||
if real_api == default_api
|
||||
][0]
|
||||
_LOGGER.info("Default API version will be: %s", last_api_version)
|
||||
|
||||
last_api_path = client_folder / last_api_version
|
||||
|
||||
# In case we are transitioning from a single api generation, clean old folders
|
||||
shutil.rmtree(str(client_folder / "operations"), ignore_errors=True)
|
||||
shutil.rmtree(str(client_folder / "models"), ignore_errors=True)
|
||||
|
||||
shutil.copy(
|
||||
str(client_folder / last_api_version / "_configuration.py"),
|
||||
str(client_folder / "_configuration.py"),
|
||||
)
|
||||
shutil.copy(
|
||||
str(client_folder / last_api_version / "__init__.py"),
|
||||
str(client_folder / "__init__.py"),
|
||||
)
|
||||
if is_multi_client_package:
|
||||
_LOGGER.warning("Patching multi-api client basic files")
|
||||
patch_import(client_folder / "_configuration.py")
|
||||
patch_import(client_folder / "__init__.py")
|
||||
|
||||
versioned_mod = versioned_modules[last_api_version]
|
||||
client_name = get_client_class_name_from_module(versioned_mod)
|
||||
client_class = versioned_mod.__dict__[client_name]
|
||||
|
||||
# Detect if this client is using an operation mixin (Network)
|
||||
# Operation mixins are available since Autorest.Python 4.x
|
||||
mixin_operations = build_operation_mixin_meta(versioned_modules)
|
||||
|
||||
# If we get a StopIteration here, means the API version folder is broken
|
||||
client_file_name = next(last_api_path.glob("*_client.py")).name
|
||||
|
||||
# versioned_operations_dict => {
|
||||
# 'application_gateways': [
|
||||
# ('v2018-05-01', 'ApplicationGatewaysOperations')
|
||||
# ]
|
||||
# }
|
||||
# mod_to_api_version => {'v2018-05-01': '2018-05-01'}
|
||||
# mixin_operations => {
|
||||
# 'check_dns_name_availability': {
|
||||
# 'doc': 'docstring',
|
||||
# 'signature': '(self, p1, p2, **operation_config)',
|
||||
# 'call': 'p1, p2',
|
||||
# 'available_apis': [
|
||||
# 'v2018_05_01'
|
||||
# ]
|
||||
# }
|
||||
# }
|
||||
# last_rt_list = {
|
||||
# 'check_dns_name_availability': '2018-05-01'
|
||||
# }
|
||||
|
||||
last_rt_list = build_last_rt_list(
|
||||
versioned_operations_dict, mixin_operations, last_api_version, preview_mode
|
||||
)
|
||||
|
||||
conf = {
|
||||
"client_name": client_name,
|
||||
"has_subscription_id": has_subscription_id(client_class),
|
||||
"module_name": module_name,
|
||||
"operations": versioned_operations_dict,
|
||||
"mixin_operations": mixin_operations,
|
||||
"mod_to_api_version": mod_to_api_version,
|
||||
"last_api_version": mod_to_api_version[last_api_version],
|
||||
"client_doc": client_class.__doc__.split("\n")[0],
|
||||
"last_rt_list": last_rt_list,
|
||||
"default_models": sorted(
|
||||
{last_api_version} | {versions for _, versions in last_rt_list.items()}
|
||||
),
|
||||
}
|
||||
|
||||
env = Environment(
|
||||
loader=FileSystemLoader(str(Path(__file__).parents[0] / "templates")),
|
||||
keep_trailing_newline=True,
|
||||
)
|
||||
|
||||
for template_name in env.list_templates():
|
||||
# Don't generate the file if there are no operation mixins
|
||||
if template_name == "_operations_mixin.py" and not mixin_operations:
|
||||
continue
|
||||
|
||||
# Some files don't use the template name directly
|
||||
if template_name == "_multiapi_client.py":
|
||||
output_filename = client_file_name
|
||||
else:
|
||||
output_filename = template_name
|
||||
|
||||
future_filepath = client_folder / output_filename
|
||||
|
||||
template = env.get_template(template_name)
|
||||
result = template.render(**conf)
|
||||
|
||||
with future_filepath.open("w") as fd:
|
||||
fd.write(result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Multi-API client generation for Azure SDK for Python"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug", dest="debug", action="store_true", help="Verbosity in DEBUG mode"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--default-api-version",
|
||||
dest="default_api",
|
||||
default=None,
|
||||
help="Force default API version, do not detect it. [default: %(default)s]",
|
||||
)
|
||||
parser.add_argument("package_name", help="The package name.")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
main_logger = logging.getLogger()
|
||||
logging.basicConfig()
|
||||
main_logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
main(args.package_name, default_api=args.default_api)
|
|
@ -0,0 +1,60 @@
|
|||
## Linting the Guidelines
|
||||
|
||||
To lint against the guidelines, make sure you are using the repository's pylintrc file.
|
||||
It is recommended you run pylint at the library package level to be consistent with how the CI runs pylint.
|
||||
|
||||
Check that you are running pylint version >= 2.3.1 and astroid version >= 2.2.5.
|
||||
|
||||
**How to run pylint locally using the pylintrc:**
|
||||
|
||||
1. Run pylint at the root of the repo and it will automatically find the pylintrc:
|
||||
```bash
|
||||
C:\azure-sdk-for-python>pylint sdk/storage/azure-storage-blob/azure
|
||||
```
|
||||
2. Add the --rcfile command line argument with a relative path to the pylintrc from your current directory:
|
||||
```bash
|
||||
C:\azure-sdk-for-python\sdk\storage>pylint --rcfile="../../pylintrc" azure-storage-blob
|
||||
```
|
||||
3. Set the environment variable PYLINTRC to the absolute path of the pylintrc file:
|
||||
```bash
|
||||
set PYLINTRC=C:\azure-sdk-for-python\pylintrc
|
||||
```
|
||||
Run pylint:
|
||||
```bash
|
||||
C:\azure-sdk-for-python\sdk\storage>pylint azure-storage-blob
|
||||
```
|
||||
4. Run pylint at the package level using tox and it will find the pylintrc file:
|
||||
```bash
|
||||
C:\azure-sdk-for-python\sdk\storage\azure-storage-blob>tox -c ../../../eng/tox/tox.ini -e lint
|
||||
```
|
||||
5. If you use the pylint extension for VS Code or PyCharm, it *should* find the pylintrc automatically.
|
||||
|
||||
**How to disable a pylint error:**
|
||||
```bash
|
||||
# pylint:disable=connection-string-should-not-be-constructor-param
|
||||
```
|
||||
|
||||
The custom pylint checkers for the SDK guidelines fall into the message range C4717 - C4738.
|
||||
You will know you came across a custom checker if it contains a link to the guidelines.
|
||||
|
||||
In the case of a false positive, use the disable command to remove the pylint error.
|
||||
|
||||
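For example, here is a minimal sketch of suppressing a checker inline once you have confirmed it is a false positive (the client class and method below are hypothetical and only illustrate where the comment goes):

```python
class ExampleClient(object):
    """Hypothetical client used only to illustrate an inline pylint suppression."""

    def check_name(self, name, value):  # pylint:disable=client-method-missing-type-annotations
        # The disable comment applies only here, so other methods are still
        # checked against the guidelines.
        return name == value
```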
**Guidelines that are currently linted:**
|
||||
|
||||
| Pylint checker name | How to fix this | How to disable this rule | Link to python guideline |
|
||||
|----------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------|
|
||||
| client-method-should-not-use-static-method | Use module level functions instead. | # pylint:disable=client-method-should-not-use-static-method | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| missing-client-constructor-parameter-credential | Add a credential parameter to the client constructor. Do not use plural form "credentials". | # pylint:disable=missing-client-constructor-parameter-credential | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| missing-client-constructor-parameter-kwargs | Add a **kwargs parameter to the client constructor. | # pylint:disable=missing-client-constructor-parameter-kwargs | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| client-method-has-more-than-5-positional-arguments | Use keyword arguments to reduce number of positional arguments. | # pylint:disable=client-method-has-more-than-5-positional-arguments | [link](https://azure.github.io/azure-sdk/python_introduction.html#method-signatures) |
|
||||
| client-method-missing-type-annotations | Check that param/return type comments are present or that param/return type annotations are present. Check that you did not mix type comments with type annotations. | # pylint:disable=client-method-missing-type-annotations | [link](https://azure.github.io/azure-sdk/python_introduction.html#types-or-not) |
|
||||
| client-incorrect-naming-convention | Check that you use... snake_case for variable, function, and method names. Pascal case for types. ALL CAPS for constants. | # pylint:disable=client-incorrect-naming-convention | [link](https://azure.github.io/azure-sdk/python_introduction.html#naming-conventions) |
|
||||
| client-method-missing-kwargs | Check that any methods that make network calls have a **kwargs parameter. | # pylint:disable=client-method-missing-kwargs | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| config-missing-kwargs-in-policy | Check that the policies in your configuration function contain a **kwargs parameter. | # pylint:disable=config-missing-kwargs-in-policy | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| async-client-bad-name | Remove "Async" from your service client's name. | # pylint:disable=async-client-bad-name | [link](https://azure.github.io/azure-sdk/python_design.html#async-support) |
|
||||
| file-needs-copyright-header | Add a copyright header to the top of your file. | # pylint:disable=file-needs-copyright-header | [link](https://azure.github.io/azure-sdk/policies_opensource.html) |
|
||||
| client-method-name-no-double-underscore | Don't use method names prefixed with "__". | # pylint:disable=client-method-name-no-double-underscore | [link](https://azure.github.io/azure-sdk/python_introduction.html#public-vs-private) |
|
||||
| specify-parameter-names-in-call | Specify the parameter names when calling methods with more than 2 required positional parameters. e.g. self.get_foo(one, two, three=three, four=four, five=five) | # pylint:disable=specify-parameter-names-in-call | [link](https://azure.github.io/azure-sdk/python_introduction.html#method-signatures) |
|
||||
| connection-string-should-not-be-constructor-param | Remove connection string parameter from client constructor. Create a method that creates the client using a connection string. | # pylint:disable=connection-string-should-not-be-constructor-param | [link](https://azure.github.io/azure-sdk/python_design.html#constructors-and-factory-methods) |
|
||||
| package-name-incorrect | Change your distribution package name to only include dashes, e.g. azure-storage-file-share | # pylint:disable=package-name-incorrect | [link](https://azure.github.io/azure-sdk/python_implementation.html#packaging) |
|
||||
| client-suffix-needed | Service client types should use a "Client" suffix, e.g. BlobClient. | # pylint:disable=client-suffix-needed | [link](https://azure.github.io/azure-sdk/python_design.html#clients) |
|
The diff for this file is not shown because of its large size.
|
@ -0,0 +1,12 @@
|
|||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
name="pylint-guidelines-checker",
|
||||
version="0.0.1",
|
||||
url='http://github.com/Azure/azure-sdk-for-python',
|
||||
license='MIT License',
|
||||
description="A pylint plugin which enforces azure sdk guidelines.",
|
||||
author='Microsoft Corporation',
|
||||
author_email='azpysdkhelp@microsoft.com',
|
||||
py_modules=['pylint_guidelines_checker'],
|
||||
)
|
The diff for this file is not shown because of its large size.
|
@ -0,0 +1,105 @@
|
|||
# coding=utf-8
|
||||
# --------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for
|
||||
# license information.
|
||||
#
|
||||
# Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
# Changes may cause incorrect behavior and will be lost if the code is
|
||||
# regenerated.
|
||||
# --------------------------------------------------------------------------
|
||||
|
||||
from msrest.service_client import SDKClient
|
||||
from msrest import Serializer, Deserializer
|
||||
|
||||
from azure.profiles import KnownProfiles, ProfileDefinition
|
||||
from azure.profiles.multiapiclient import MultiApiClientMixin
|
||||
from ._configuration import {{ client_name }}Configuration
|
||||
{% if mixin_operations %}from ._operations_mixin import {{ client_name }}OperationsMixin{% endif %}
|
||||
|
||||
|
||||
class {{ client_name }}({% if mixin_operations %}{{ client_name }}OperationsMixin, {% endif %}MultiApiClientMixin, SDKClient):
|
||||
"""{{ client_doc }}
|
||||
|
||||
This client contains multiple API versions, to help you deal with all Azure clouds
|
||||
(Azure Stack, Azure Government, Azure China, etc.).
|
||||
By default, uses latest API version available on public Azure.
|
||||
For production, you should stick to a particular api-version and/or profile.
|
||||
The profile sets a mapping between the operation group and an API version.
|
||||
The api-version parameter sets the default API version if the operation
|
||||
group is not described in the profile.
|
||||
|
||||
:ivar config: Configuration for client.
|
||||
:vartype config: {{ client_name }}Configuration
|
||||
|
||||
:param credentials: Credentials needed for the client to connect to Azure.
|
||||
:type credentials: :mod:`A msrestazure Credentials
|
||||
object<msrestazure.azure_active_directory>`
|
||||
{%- if has_subscription_id %}
|
||||
:param subscription_id: Subscription credentials which uniquely identify
|
||||
Microsoft Azure subscription. The subscription ID forms part of the URI
|
||||
for every service call.
|
||||
:type subscription_id: str
|
||||
{%- endif %}
|
||||
:param str api_version: API version to use if no profile is provided, or if
|
||||
missing in profile.
|
||||
:param str base_url: Service URL
|
||||
:param profile: A profile definition, from KnownProfiles to dict.
|
||||
:type profile: azure.profiles.KnownProfiles
|
||||
"""
|
||||
|
||||
DEFAULT_API_VERSION = '{{ last_api_version }}'
|
||||
_PROFILE_TAG = "{{ module_name }}.{{ client_name }}"
|
||||
LATEST_PROFILE = ProfileDefinition({
|
||||
_PROFILE_TAG: {
|
||||
None: DEFAULT_API_VERSION,
|
||||
{%- for rt_name, api_version in last_rt_list|dictsort %}
|
||||
'{{ rt_name }}': '{{ mod_to_api_version[api_version] }}',
|
||||
{%- endfor %}
|
||||
}},
|
||||
_PROFILE_TAG + " latest"
|
||||
)
|
||||
|
||||
def __init__(self, credentials{%- if has_subscription_id %}, subscription_id{% endif %}, api_version=None, base_url=None, profile=KnownProfiles.default):
|
||||
self.config = {{ client_name }}Configuration(credentials{%- if has_subscription_id %}, subscription_id{% endif %}, base_url)
|
||||
super({{ client_name }}, self).__init__(
|
||||
credentials,
|
||||
self.config,
|
||||
api_version=api_version,
|
||||
profile=profile
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _models_dict(cls, api_version):
|
||||
return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)}
|
||||
|
||||
@classmethod
|
||||
def models(cls, api_version=DEFAULT_API_VERSION):
|
||||
"""Module depends on the API version:
|
||||
{% for mod_api_version, api_version in mod_to_api_version|dictsort %}
|
||||
* {{ api_version }}: :mod:`{{ mod_api_version }}.models<{{ module_name }}.{{ mod_api_version }}.models>`
|
||||
{%- endfor %}
|
||||
"""
|
||||
{%- for mod_api_version, api_version in mod_to_api_version|dictsort %}
|
||||
{% if not loop.first %}el{% endif %}if api_version == '{{ api_version }}':
|
||||
from .{{ mod_api_version }} import models
|
||||
return models
|
||||
{%- endfor %}
|
||||
raise NotImplementedError("APIVersion {} is not available".format(api_version))
|
||||
{% for operation_name, available_apis in operations|dictsort %}
|
||||
@property
|
||||
def {{ operation_name }}(self):
|
||||
"""Instance depends on the API version:
|
||||
{% for api in available_apis %}
|
||||
* {{ mod_to_api_version[api[0]] }}: :class:`{{ api[1] }}<{{ module_name }}.{{ api[0] }}.operations.{{ api[1] }}>`
|
||||
{%- endfor %}
|
||||
"""
|
||||
api_version = self._get_api_version('{{ operation_name }}')
|
||||
{%- for api in available_apis %}
|
||||
{% if not loop.first %}el{% endif %}if api_version == '{{ mod_to_api_version[api[0]] }}':
|
||||
from .{{ api[0] }}.operations import {{ api[1] }} as OperationClass
|
||||
{%- endfor %}
|
||||
else:
|
||||
raise NotImplementedError("APIVersion {} is not available".format(api_version))
|
||||
return OperationClass(self._client, self.config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
||||
{% endfor %}
|
|
@ -0,0 +1,34 @@
|
|||
# coding=utf-8
|
||||
# --------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for
|
||||
# license information.
|
||||
#
|
||||
# Code generated by Microsoft (R) AutoRest Code Generator.
|
||||
# Changes may cause incorrect behavior and will be lost if the code is
|
||||
# regenerated.
|
||||
# --------------------------------------------------------------------------
|
||||
from msrest import Serializer, Deserializer
|
||||
|
||||
|
||||
class {{ client_name }}OperationsMixin(object):
|
||||
|
||||
{% for operation_name, metadata in mixin_operations|dictsort %}
|
||||
def {{ operation_name }}{{ metadata['signature'] }}:
|
||||
"""{{ metadata['doc'] }}
|
||||
"""
|
||||
api_version = self._get_api_version('{{ operation_name }}')
|
||||
{%- for api in metadata['available_apis'] %}
|
||||
{% if not loop.first %}el{% endif %}if api_version == '{{ mod_to_api_version[api] }}':
|
||||
from .{{ api }}.operations import {{ client_name }}OperationsMixin as OperationClass
|
||||
{%- endfor %}
|
||||
else:
|
||||
raise NotImplementedError("APIVersion {} is not available".format(api_version))
|
||||
mixin_instance = OperationClass()
|
||||
mixin_instance._client = self._client
|
||||
mixin_instance.config = self.config
|
||||
mixin_instance._serialize = Serializer(self._models_dict(api_version))
|
||||
mixin_instance._deserialize = Deserializer(self._models_dict(api_version))
|
||||
mixin_instance.api_version = api_version
|
||||
return mixin_instance.{{ operation_name }}({{ metadata['call'] }}, **operation_config)
|
||||
{% endfor %}
|
|
@ -0,0 +1,9 @@
|
|||
# coding=utf-8
|
||||
# --------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for
|
||||
# license information.
|
||||
# --------------------------------------------------------------------------
|
||||
{%- for mod_api_version in default_models %}
|
||||
from .{{ mod_api_version }}.models import *
|
||||
{%- endfor %}
|
|
@ -0,0 +1,19 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def trim(base_folder):
|
||||
base_folder = Path(base_folder)
|
||||
for aio_folder in Path(base_folder).glob("**/aio"):
|
||||
_LOGGER.info("Working on %s", aio_folder)
|
||||
shutil.rmtree(aio_folder) # Let it throw at worst
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
trim(sys.argv[1])
|
|
@ -0,0 +1,20 @@
|
|||
{
|
||||
"$schema": "https://openapistorageprod.blob.core.windows.net/sdkautomation/prod/schemas/swagger_to_sdk_config.schema.json",
|
||||
"meta": {
|
||||
"autorest_options": {
|
||||
"version": "V2",
|
||||
"use": "@microsoft.azure/autorest.python@4.0.73",
|
||||
"python": "",
|
||||
"sdkrel:python-sdks-folder": "./sdk/.",
|
||||
"multiapi": "",
|
||||
"keep-version-file" :"",
|
||||
"no-async": ""
|
||||
},
|
||||
"advanced_options": {
|
||||
"create_sdk_pull_requests": true,
|
||||
"sdk_generation_pull_request_base": "integration_branch"
|
||||
},
|
||||
"repotag": "azure-sdk-for-python",
|
||||
"version": "0.2.0"
|
||||
}
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
-e ../azure-devtools
|
|
@ -0,0 +1,126 @@
|
|||
from contextlib import suppress
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, List
|
||||
|
||||
from jinja2 import Template, PackageLoader, Environment
|
||||
from .conf import read_conf, build_default_conf, CONF_NAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_CWD = Path(__file__).resolve().parent
|
||||
_TEMPLATE_PATH = _CWD / "template"
|
||||
|
||||
def build_config(config : Dict[str, Any]) -> Dict[str, str]:
|
||||
"""Will build the actual config for Jinja2, based on SDK config.
|
||||
"""
|
||||
result = config.copy()
|
||||
# Manage the classifier stable/beta
|
||||
is_stable = result.pop("is_stable", False)
|
||||
if is_stable:
|
||||
result["classifier"] = "Development Status :: 5 - Production/Stable"
|
||||
else:
|
||||
result["classifier"] = "Development Status :: 4 - Beta"
|
||||
# Manage the nspkg
|
||||
package_name = result["package_name"]
|
||||
result["package_nspkg"] = result.pop(
|
||||
"package_nspkg",
|
||||
package_name[:package_name.rindex('-')]+"-nspkg"
|
||||
)
|
||||
# ARM?
|
||||
result['is_arm'] = result.pop("is_arm", True)
|
||||
|
||||
# Do I need msrestazure for this package?
|
||||
result['need_msrestazure'] = result.pop("need_msrestazure", True)
|
||||
|
||||
# Pre-compute some Jinja variable that are complicated to do inside the templates
|
||||
package_parts = result["package_nspkg"][:-len('-nspkg')].split('-')
|
||||
result['nspkg_names'] = [
|
||||
".".join(package_parts[:i+1])
|
||||
for i in range(len(package_parts))
|
||||
]
|
||||
result['init_names'] = [
|
||||
"/".join(package_parts[:i+1])+"/__init__.py"
|
||||
for i in range(len(package_parts))
|
||||
]
|
||||
|
||||
# Return result
|
||||
return result
|
||||
|
||||
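# Example with a hypothetical package name: for "azure-mgmt-example", package_nspkg
# defaults to "azure-mgmt-nspkg", nspkg_names becomes ["azure", "azure.mgmt"] and
# init_names becomes ["azure/__init__.py", "azure/mgmt/__init__.py"].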
|
||||
def build_packaging(output_folder: str, gh_token: Optional[str]=None, jenkins: bool = False, packages: List[str]=None, build_conf: bool = False) -> None:
|
||||
package_names = set(packages or [])
|
||||
if jenkins:
|
||||
sdk_id = os.environ["ghprbGhRepository"]
|
||||
pr_number = int(os.environ["ghprbPullId"])
|
||||
|
||||
from github import Github
|
||||
con = Github(gh_token)
|
||||
repo = con.get_repo(sdk_id)
|
||||
sdk_pr = repo.get_pull(pr_number)
|
||||
# "get_files" of Github only download the first 300 files. Might not be enough.
|
||||
package_names |= {f.filename.split('/')[0] for f in sdk_pr.get_files() if f.filename.startswith("azure")}
|
||||
|
||||
if not package_names:
|
||||
raise ValueError("Was unable to find out the package names.")
|
||||
|
||||
for package_name in package_names:
|
||||
build_packaging_by_package_name(package_name, output_folder, build_conf)
|
||||
|
||||
|
||||
def build_packaging_by_package_name(package_name: str, output_folder: str, build_conf: bool = False) -> None:
|
||||
_LOGGER.info("Building template %s", package_name)
|
||||
package_folder = Path(output_folder) / Path(package_name)
|
||||
|
||||
if build_conf:
|
||||
build_default_conf(package_folder, package_name)
|
||||
|
||||
conf = read_conf(package_folder)
|
||||
if not conf:
|
||||
raise ValueError("Create a {} file before calling this script".format(package_folder / CONF_NAME))
|
||||
|
||||
if not conf.get("auto_update", True):
|
||||
_LOGGER.info(f"Package {package_name} has no auto-packaging update enabled")
|
||||
return
|
||||
|
||||
env = Environment(
|
||||
loader=PackageLoader('packaging_tools', 'templates'),
|
||||
keep_trailing_newline=True
|
||||
)
|
||||
conf = build_config(conf)
|
||||
|
||||
for template_name in env.list_templates():
|
||||
future_filepath = Path(output_folder) / package_name / template_name
|
||||
|
||||
# Might decide to make it more generic one day
|
||||
if template_name == "CHANGELOG.md" and future_filepath.exists():
|
||||
_LOGGER.info("Skipping CHANGELOG.md template, since a previous one was found")
|
||||
# Never overwrite the ChangeLog
|
||||
continue
|
||||
|
||||
template = env.get_template(template_name)
|
||||
result = template.render(**conf)
|
||||
|
||||
# __init__.py is a weird one
|
||||
if template_name == "__init__.py":
|
||||
split_package_name = package_name.split("-")[:-1]
|
||||
for i in range(len(split_package_name)):
|
||||
init_path = Path(output_folder).joinpath(
|
||||
package_name,
|
||||
*split_package_name[:i+1],
|
||||
template_name
|
||||
)
|
||||
with open(init_path, "w") as fd:
|
||||
fd.write(result)
|
||||
|
||||
continue
|
||||
|
||||
with open(future_filepath, "w") as fd:
|
||||
fd.write(result)
|
||||
# azure_bdist_wheel has been removed, but we need to delete the file manually
|
||||
with suppress(FileNotFoundError):
|
||||
(Path(output_folder) / package_name / "azure_bdist_wheel.py").unlink()
|
||||
|
||||
|
||||
_LOGGER.info("Template done %s", package_name)
|
|
@ -0,0 +1,54 @@
|
|||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
from . import build_packaging
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_epilog="""This script will automatically build the TOML configuration file with default value if it doesn't exist.
|
||||
"""
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Packaging tools for Azure SDK for Python',
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
epilog=_epilog
|
||||
)
|
||||
parser.add_argument('--output', '-o',
|
||||
dest='output', default='.',
|
||||
help='Output dir, should be SDK repo folder. [default: %(default)s]')
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
parser.add_argument("--build-conf",
|
||||
dest="build_conf", action="store_true",
|
||||
help="Build a default TOML file, with package name, fake pretty name, as beta package and no doc page. Do nothing if the file exists, remove manually the file if needed.")
|
||||
parser.add_argument("--jenkins",
|
||||
dest="jenkins", action="store_true",
|
||||
help="In Jenkins mode, try to find what to generate from Jenkins env variables. Package names are then optional.")
|
||||
parser.add_argument('package_names', nargs='*', help='The package name.')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
main_logger = logging.getLogger()
|
||||
logging.basicConfig()
|
||||
main_logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
if not args.package_names and not args.jenkins:
|
||||
raise ValueError("At least one package name or Jenkins mode is required")
|
||||
|
||||
try:
|
||||
build_packaging(
|
||||
args.output,
|
||||
os.environ.get("GH_TOKEN", None),
|
||||
args.jenkins,
|
||||
args.package_names,
|
||||
build_conf=args.build_conf
|
||||
)
|
||||
except Exception as err:
|
||||
if args.debug:
|
||||
_LOGGER.exception(err)
|
||||
else:
|
||||
_LOGGER.critical(err)
|
||||
sys.exit(1)
|
|
@ -0,0 +1,201 @@
|
|||
import json
|
||||
import logging
|
||||
|
||||
from json_delta import diff
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
class ChangeLog:
|
||||
def __init__(self, old_report, new_report):
|
||||
self.features = []
|
||||
self.breaking_changes = []
|
||||
self._old_report = old_report
|
||||
self._new_report = new_report
|
||||
|
||||
def build_md(self):
|
||||
buffer = []
|
||||
if self.features:
|
||||
buffer.append("**Features**")
|
||||
buffer.append("")
|
||||
for feature in self.features:
|
||||
buffer.append(" - "+feature)
|
||||
buffer.append("")
|
||||
if self.breaking_changes:
|
||||
buffer.append("**Breaking changes**")
|
||||
buffer.append("")
|
||||
for breaking_change in self.breaking_changes:
|
||||
buffer.append(" - "+breaking_change)
|
||||
return "\n".join(buffer).strip()
|
||||
|
||||
@staticmethod
|
||||
def _unpack_diff_entry(diff_entry):
|
||||
return diff_entry[0], len(diff_entry) == 1
|
||||
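# As consumed here, a json_delta diff entry looks roughly like [path] for a deletion
# and [path, new_value] for an addition or replacement, hence the len(diff_entry) == 1
# check above (illustrative description, based on how this module uses the entries).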
|
||||
def operation(self, diff_entry):
|
||||
path, is_deletion = self._unpack_diff_entry(diff_entry)
|
||||
|
||||
# Is this a new operation group?
|
||||
_, operation_name, *remaining_path = path
|
||||
if not remaining_path:
|
||||
if is_deletion:
|
||||
self.breaking_changes.append(_REMOVE_OPERATION_GROUP.format(operation_name))
|
||||
else:
|
||||
self.features.append(_ADD_OPERATION_GROUP.format(operation_name))
|
||||
return
|
||||
|
||||
_, *remaining_path = remaining_path
|
||||
if not remaining_path:
|
||||
# Not common, but this means the group has changed a lot. Compute the list manually
|
||||
old_ops_name = list(self._old_report["operations"][operation_name]["functions"])
|
||||
new_ops_name = list(self._new_report["operations"][operation_name]["functions"])
|
||||
for removed_function in set(old_ops_name) - set(new_ops_name):
|
||||
self.breaking_changes.append(_REMOVE_OPERATION.format(operation_name, removed_function))
|
||||
for added_function in set(new_ops_name) - set(old_ops_name):
|
||||
self.features.append(_ADD_OPERATION.format(operation_name, added_function))
|
||||
return
|
||||
|
||||
# Is this a new operation, inside a known operation group?
|
||||
function_name, *remaining_path = remaining_path
|
||||
if not remaining_path:
|
||||
if is_deletion:
|
||||
self.breaking_changes.append(_REMOVE_OPERATION.format(operation_name, function_name))
|
||||
else:
|
||||
self.features.append(_ADD_OPERATION.format(operation_name, function_name))
|
||||
return
|
||||
|
||||
if remaining_path[0] == "metadata":
|
||||
# Ignore change in metadata for now, they have no impact
|
||||
return
|
||||
|
||||
# So the method signature changed. Be vague for now
|
||||
self.breaking_changes.append(_SIGNATURE_CHANGE.format(operation_name, function_name))
|
||||
|
||||
|
||||
def models(self, diff_entry):
|
||||
path, is_deletion = self._unpack_diff_entry(diff_entry)
|
||||
|
||||
# Is this a new model?
|
||||
_, mtype, *remaining_path = path
|
||||
if not remaining_path:
|
||||
# Seen once in Network, because exceptions were added. Bypass
|
||||
return
|
||||
model_name, *remaining_path = remaining_path
|
||||
if not remaining_path:
|
||||
# A new model or a model deletion is not very interesting by itself
|
||||
# since it usually means that there is a new operation
|
||||
#
|
||||
# We might miss some new discriminator sub-classes, however
|
||||
return
|
||||
|
||||
# That's a model signature change
|
||||
if mtype in ["enums", "exceptions"]:
|
||||
# Don't change log anything for Enums for now
|
||||
return
|
||||
|
||||
_, *remaining_path = remaining_path
|
||||
if not remaining_path:  # This means massive signature changes that we don't even try to list
|
||||
self.breaking_changes.append(_MODEL_SIGNATURE_CHANGE.format(model_name))
|
||||
return
|
||||
|
||||
# This is a real model
|
||||
parameter_name, *remaining_path = remaining_path
|
||||
is_required = lambda report, model_name, param_name: report["models"]["models"][model_name]["parameters"][param_name]["properties"]["required"]
|
||||
if not remaining_path:
|
||||
if is_deletion:
|
||||
self.breaking_changes.append(_MODEL_PARAM_DELETE.format(model_name, parameter_name))
|
||||
else:
|
||||
# This one is tough: if the new parameter is "required",
|
||||
# then it's breaking. If not, it's a feature
|
||||
if is_required(self._new_report, model_name, parameter_name):
|
||||
self.breaking_changes.append(_MODEL_PARAM_ADD_REQUIRED.format(model_name, parameter_name))
|
||||
else:
|
||||
self.features.append(_MODEL_PARAM_ADD.format(model_name, parameter_name))
|
||||
return
|
||||
|
||||
# The parameter already exists
|
||||
new_is_required = is_required(self._new_report, model_name, parameter_name)
|
||||
old_is_required = is_required(self._old_report, model_name, parameter_name)
|
||||
|
||||
if new_is_required and not old_is_required:
|
||||
# This shifted from optional to required
|
||||
self.breaking_changes.append(_MODEL_PARAM_CHANGE_REQUIRED.format(parameter_name, model_name))
|
||||
return
|
||||
|
||||
|
||||
## Features
|
||||
_ADD_OPERATION_GROUP = "Added operation group {}"
|
||||
_ADD_OPERATION = "Added operation {}.{}"
|
||||
_MODEL_PARAM_ADD = "Model {} has a new parameter {}"
|
||||
|
||||
## Breaking Changes
|
||||
_REMOVE_OPERATION_GROUP = "Removed operation group {}"
|
||||
_REMOVE_OPERATION = "Removed operation {}.{}"
|
||||
_SIGNATURE_CHANGE = "Operation {}.{} has a new signature"
|
||||
_MODEL_SIGNATURE_CHANGE = "Model {} has a new signature"
|
||||
_MODEL_PARAM_DELETE = "Model {} no longer has parameter {}"
|
||||
_MODEL_PARAM_ADD_REQUIRED = "Model {} has a new required parameter {}"
|
||||
_MODEL_PARAM_CHANGE_REQUIRED = "Parameter {} of model {} is now required"
|
||||
|
||||
def build_change_log(old_report, new_report):
|
||||
change_log = ChangeLog(old_report, new_report)
|
||||
|
||||
result = diff(old_report, new_report)
|
||||
|
||||
for diff_line in result:
|
||||
# Operations
|
||||
if diff_line[0][0] == "operations":
|
||||
change_log.operation(diff_line)
|
||||
else:
|
||||
change_log.models(diff_line)
|
||||
|
||||
return change_log
|
||||
|
||||
def get_report_from_parameter(input_parameter):
|
||||
if ":" in input_parameter:
|
||||
package_name, version = input_parameter.split(":")
|
||||
from .code_report import main
|
||||
result = main(
|
||||
package_name,
|
||||
version=version if version not in ["pypi", "latest"] else None,
|
||||
last_pypi=version == "pypi"
|
||||
)
|
||||
if not result:
|
||||
raise ValueError("Was not able to build a report")
|
||||
if len(result) == 1:
|
||||
with open(result[0], "r") as fd:
|
||||
return json.load(fd)
|
||||
|
||||
raise NotImplementedError("Multi-api changelog not yet implemented")
|
||||
|
||||
with open(input_parameter, "r") as fd:
|
||||
return json.load(fd)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='ChangeLog computation',
|
||||
)
|
||||
parser.add_argument('base',
|
||||
help='Base. Could be a file path, or <package_name>:<version>. Version can be pypi, latest or a real version')
|
||||
parser.add_argument('latest',
|
||||
help='Latest. Could be a file path, or <package_name>:<version>. Version can be pypi, latest or a real version')
|
||||
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
old_report = get_report_from_parameter(args.base)
|
||||
new_report = get_report_from_parameter(args.latest)
|
||||
|
||||
# result = diff(old_report, new_report)
|
||||
# with open("result.json", "w") as fd:
|
||||
# json.dump(result, fd)
|
||||
|
||||
change_log = build_change_log(old_report, new_report)
|
||||
print(change_log.build_md())
|
|
@ -0,0 +1,298 @@
|
|||
import importlib
|
||||
import inspect
|
||||
import json
|
||||
import logging
|
||||
import glob
|
||||
import os
|
||||
import pkgutil
|
||||
from pathlib import Path
|
||||
import subprocess
|
||||
import sys
|
||||
import types
|
||||
from typing import Dict, Any, Optional
|
||||
|
||||
# Because I'm subprocessing myself, I need to do weird things with imports.
|
||||
try:
|
||||
# If I'm started as a module __main__
|
||||
from .venvtools import create_venv_with_package
|
||||
except (ModuleNotFoundError, ImportError) as e:
|
||||
# If I'm started by my main directly
|
||||
from venvtools import create_venv_with_package
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
def parse_input(input_parameter):
|
||||
"""From a syntax like package_name#submodule, build a package name
|
||||
and complete module name.
|
||||
"""
|
||||
split_package_name = input_parameter.split('#')
|
||||
package_name = split_package_name[0]
|
||||
module_name = package_name.replace("-", ".")
|
||||
if len(split_package_name) >= 2:
|
||||
module_name = ".".join([module_name, split_package_name[1]])
|
||||
return package_name, module_name
|
||||
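# Example (illustrative): parse_input("azure-mgmt-resource#locks") returns
# ("azure-mgmt-resource", "azure.mgmt.resource.locks"), while a plain package name
# such as "azure-mgmt-compute" maps to module name "azure.mgmt.compute".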
|
||||
def create_empty_report():
|
||||
return {
|
||||
"models": {
|
||||
"enums": {},
|
||||
"exceptions": {},
|
||||
"models": {}
|
||||
},
|
||||
"operations": {}
|
||||
}
|
||||
|
||||
def create_report(module_name: str) -> Dict[str, Any]:
|
||||
module_to_generate = importlib.import_module(module_name)
|
||||
|
||||
report = create_empty_report()
|
||||
|
||||
# Look for models first
|
||||
model_names = [model_name for model_name in dir(module_to_generate.models) if model_name[0].isupper()]
|
||||
for model_name in model_names:
|
||||
model_cls = getattr(module_to_generate.models, model_name)
|
||||
if hasattr(model_cls, "_attribute_map"):
|
||||
report["models"]["models"][model_name] = create_model_report(model_cls)
|
||||
elif issubclass(model_cls, Exception): # If not, might be an exception
|
||||
report["models"]["exceptions"][model_name] = create_model_report(model_cls)
|
||||
else:
|
||||
report["models"]["enums"][model_name] = create_model_report(model_cls)
|
||||
# Look for operation groups
|
||||
try:
|
||||
operations_classes = [op_name for op_name in dir(module_to_generate.operations) if op_name[0].isupper()]
|
||||
except AttributeError:
|
||||
# This guy has no "operations", this is possible (Cognitive Services). Just skip it then.
|
||||
operations_classes = []
|
||||
|
||||
for op_name in operations_classes:
|
||||
op_content = {'name': op_name}
|
||||
op_cls = getattr(module_to_generate.operations, op_name)
|
||||
for op_attr_name in dir(op_cls):
|
||||
op_attr = getattr(op_cls, op_attr_name)
|
||||
if isinstance(op_attr, types.FunctionType) and not op_attr_name.startswith("_"):
|
||||
# Keep it
|
||||
func_content = create_report_from_func(op_attr)
|
||||
op_content.setdefault("functions", {})[op_attr_name] = func_content
|
||||
report['operations'][op_name] = op_content
|
||||
|
||||
return report
|
||||
|
||||
def create_model_report(model_cls):
|
||||
result = {
|
||||
'name': model_cls.__name__,
|
||||
}
|
||||
# If _attribute_map, it's a model
|
||||
if hasattr(model_cls, "_attribute_map"):
|
||||
result['type'] = "Model"
|
||||
for attribute, conf in model_cls._attribute_map.items():
|
||||
attribute_validation = getattr(model_cls, "_validation", {}).get(attribute, {})
|
||||
|
||||
result.setdefault('parameters', {})[attribute] = {
|
||||
'name': attribute,
|
||||
'properties': {
|
||||
'type': conf['type'],
|
||||
'required': attribute_validation.get('required', False),
|
||||
'readonly': attribute_validation.get('readonly', False)
|
||||
}
|
||||
}
|
||||
elif issubclass(model_cls, Exception): # If not, might be an exception
|
||||
result['type'] = "Exception"
|
||||
else: # If not, it's an enum
|
||||
result['type'] = "Enum"
|
||||
result['values'] = list(model_cls.__members__)
|
||||
|
||||
return result
|
||||
|
||||
def create_report_from_func(function_attr):
|
||||
func_content = {
|
||||
'name': function_attr.__name__,
|
||||
'metadata': getattr(function_attr, "metadata", {}),
|
||||
'parameters': []
|
||||
}
|
||||
signature = inspect.signature(function_attr)
|
||||
for parameter_name in signature.parameters:
|
||||
if parameter_name == "self":
|
||||
continue
|
||||
if parameter_name =="custom_headers":
|
||||
break # We reach Autorest generic
|
||||
parameter = signature.parameters[parameter_name]
|
||||
func_content["parameters"].append({
|
||||
'name': parameter.name,
|
||||
})
|
||||
return func_content
|
||||
|
||||
# Given a package name, return the appropriate relative path between the sdk_root and the actual package directory
|
||||
def resolve_package_directory(package_name):
|
||||
packages = [os.path.dirname(p) for p in (glob.glob('{}/setup.py'.format(package_name)) + glob.glob('sdk/*/{}/setup.py'.format(package_name)))]
|
||||
|
||||
if len(packages) > 1:
|
||||
print('There should only be a single package matched in either repository structure. The following were found: {}'.format(packages))
|
||||
sys.exit(1)
|
||||
|
||||
return packages[0]
|
||||
|
||||
def merge_report(report_paths):
|
||||
"""Merge report on the given paths list.
|
||||
"""
|
||||
if len(report_paths) == 1:
|
||||
raise ValueError("Doesn't make sense to merge a report if there is only one report....")
|
||||
|
||||
merged_report = create_empty_report()
|
||||
for report in sorted(report_paths):
|
||||
with report.open() as report_fd:
|
||||
report_json = json.load(report_fd)
|
||||
|
||||
merged_report["models"]["enums"].update(report_json["models"]["enums"])
|
||||
merged_report["models"]["exceptions"].update(report_json["models"]["exceptions"])
|
||||
merged_report["models"]["models"].update(report_json["models"]["models"])
|
||||
merged_report["operations"].update(report_json["operations"])
|
||||
return merged_report
|
||||
|
||||
def main(input_parameter: str, version: Optional[str] = None, no_venv: bool = False, pypi: bool = False, last_pypi: bool = False, output: str = None):
|
||||
package_name, module_name = parse_input(input_parameter)
|
||||
path_to_package = resolve_package_directory(package_name)
|
||||
|
||||
if (version or pypi or last_pypi) and not no_venv:
|
||||
if version:
|
||||
versions = [version]
|
||||
else:
|
||||
_LOGGER.info(f"Download versions of {package_name} on PyPI")
|
||||
from pypi_tools.pypi import PyPIClient
|
||||
client = PyPIClient()
|
||||
versions = [str(v) for v in client.get_ordered_versions(package_name)]
|
||||
_LOGGER.info(f"Got {versions}")
|
||||
if last_pypi:
|
||||
_LOGGER.info(f"Only keep last PyPI version")
|
||||
versions = [versions[-1]]
|
||||
|
||||
for version in versions:
|
||||
_LOGGER.info(f"Installing version {version} of {package_name} in a venv")
|
||||
with create_venv_with_package([f"{package_name}=={version}"]) as venv:
|
||||
args = [
|
||||
venv.env_exe,
|
||||
__file__,
|
||||
"--no-venv",
|
||||
"--version",
|
||||
version,
|
||||
input_parameter
|
||||
]
|
||||
if output is not None:
|
||||
args.append("--output=" + output)
|
||||
try:
|
||||
subprocess.check_call(args)
|
||||
except subprocess.CalledProcessError:
|
||||
# If it fails, just assume this version is too old to get an Autorest report
|
||||
_LOGGER.warning(f"Version {version} seems to be too old to build a report (probably not Autorest based)")
|
||||
# Files have been written by the subprocess
|
||||
return
|
||||
|
||||
modules = find_autorest_generated_folder(module_name)
|
||||
result = []
|
||||
version = version or "latest"
|
||||
output_folder = Path(path_to_package) / Path("code_reports") / Path(version)
|
||||
output_folder.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for module_name in modules:
|
||||
_LOGGER.info(f"Working on {module_name}")
|
||||
|
||||
report = create_report(module_name)
|
||||
|
||||
module_for_path = get_sub_module_part(package_name, module_name)
|
||||
if module_for_path:
|
||||
output_filename = output_folder / Path(module_for_path+".json")
|
||||
else:
|
||||
if output is not None:
|
||||
output_filename = output
|
||||
else:
|
||||
output_filename = output_folder / Path("report.json")
|
||||
|
||||
with open(output_filename, "w") as fd:
|
||||
json.dump(report, fd, indent=2)
|
||||
_LOGGER.info(f"Report written to {output_filename}")
|
||||
result.append(output_filename)
|
||||
|
||||
if len(result) > 1:
|
||||
merged_report = merge_report(result)
|
||||
if output is not None:
|
||||
output_filename = output
|
||||
else:
|
||||
output_filename = output_folder / Path("merged_report.json")
|
||||
with open(output_filename, "w") as fd:
|
||||
json.dump(merged_report, fd, indent=2)
|
||||
_LOGGER.info(f"Merged report written to {output_filename}")
|
||||
|
||||
return result
|
||||
|
||||
def find_autorest_generated_folder(module_prefix="azure"):
|
||||
"""Find all Autorest generated code in that module prefix.
|
||||
This actually looks for a "models" package only (not file). We could be smarter if necessary.
|
||||
"""
|
||||
_LOGGER.info(f"Looking for Autorest generated package in {module_prefix}")
|
||||
|
||||
# Manually skip some namespaces for now
|
||||
if module_prefix in ["azure.cli", "azure.storage", "azure.servicemanagement", "azure.servicebus"]:
|
||||
_LOGGER.info(f"Skip {module_prefix}")
|
||||
return []
|
||||
|
||||
result = []
|
||||
try:
|
||||
_LOGGER.debug(f"Try {module_prefix}")
|
||||
model_module = importlib.import_module(".models", module_prefix)
|
||||
# If there is no exception, we MIGHT have found it, but it cannot be a plain file module.
|
||||
# Keep trying to break it: file modules have no __path__
|
||||
model_module.__path__
|
||||
_LOGGER.info(f"Found {module_prefix}")
|
||||
result.append(module_prefix)
|
||||
except (ModuleNotFoundError, AttributeError):
|
||||
# No model, might dig deeper
|
||||
prefix_module = importlib.import_module(module_prefix)
|
||||
for _, sub_package, ispkg in pkgutil.iter_modules(prefix_module.__path__, module_prefix+"."):
|
||||
if ispkg:
|
||||
result += find_autorest_generated_folder(sub_package)
|
||||
return result
|
||||
|
||||
|
||||
def get_sub_module_part(package_name, module_name):
|
||||
"""Assuming package is azure-mgmt-compute and module name is azure.mgmt.compute.v2018-08-01
|
||||
will return v2018-08-01
|
||||
"""
|
||||
sub_module_from_package = package_name.replace("-", ".")
|
||||
if not module_name.startswith(sub_module_from_package):
|
||||
_LOGGER.warning(f"Submodule {module_name} does not start with package name {package_name}")
|
||||
return
|
||||
return module_name[len(sub_module_from_package)+1:]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import argparse
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Code fingerprint building',
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
parser.add_argument('package_name',
|
||||
help='Package name.')
|
||||
parser.add_argument('--version', '-v',
|
||||
dest='version',
|
||||
help='The version of the package you want. By default, latest and current branch.')
|
||||
parser.add_argument('--no-venv',
|
||||
dest='no_venv', action="store_true",
|
||||
help="If version is provided, this will assume the current accessible package is already this version. You should probably not use it.")
|
||||
parser.add_argument('--pypi',
|
||||
dest='pypi', action="store_true",
|
||||
help="If provided, build report for all versions on pypi of this package.")
|
||||
parser.add_argument('--last-pypi',
|
||||
dest='last_pypi', action="store_true",
|
||||
help="If provided, build report for last version on pypi of this package.")
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
parser.add_argument("--output",
|
||||
dest="output",
|
||||
help="Override output path.")
|
||||
args = parser.parse_args()
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
main(args.package_name, args.version, args.no_venv, args.pypi, args.last_pypi, args.output)
|
|
@ -0,0 +1,43 @@
|
|||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any
|
||||
|
||||
import pytoml as toml
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_NAME = "sdk_packaging.toml"
|
||||
_SECTION = "packaging"
|
||||
|
||||
# Default conf
|
||||
_CONFIG = {
|
||||
"package_name": "packagename",
|
||||
"package_nspkg": "packagenspkg",
|
||||
"package_pprint_name": "MyService Management",
|
||||
"package_doc_id": "",
|
||||
"is_stable": False,
|
||||
"is_arm": True,
|
||||
"need_msrestazure": True
|
||||
}
|
||||
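# With the defaults above, the generated sdk_packaging.toml looks roughly like this
# (illustrative rendering of the dict; actual output comes from toml.dump):
#
# [packaging]
# package_name = "packagename"
# package_nspkg = "packagenspkg"
# package_pprint_name = "MyService Management"
# package_doc_id = ""
# is_stable = false
# is_arm = true
# need_msrestazure = true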
|
||||
def read_conf(folder: Path) -> Dict[str, Any]:
|
||||
conf_path = folder / CONF_NAME
|
||||
if not conf_path.exists():
|
||||
return {}
|
||||
|
||||
with open(conf_path, "rb") as fd:
|
||||
return toml.load(fd)[_SECTION]
|
||||
|
||||
def build_default_conf(folder: Path, package_name: str) -> None:
|
||||
conf_path = folder / CONF_NAME
|
||||
if conf_path.exists():
|
||||
_LOGGER.info("Skipping default conf since the file exists")
|
||||
return
|
||||
|
||||
_LOGGER.info("Build default conf for %s", package_name)
|
||||
conf = {_SECTION: _CONFIG.copy()}
|
||||
conf[_SECTION]["package_name"] = package_name
|
||||
conf[_SECTION]["package_nspkg"] = package_name[:package_name.rindex('-')]+"-nspkg"
|
||||
|
||||
with open(conf_path, "w") as fd:
|
||||
toml.dump(conf, fd)
|
|
@ -0,0 +1,141 @@
|
|||
"""This file is specific to Azure SDK for Python and should be split somewhere else."""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
from github import Github
|
||||
|
||||
from azure_devtools.ci_tools.github_tools import (
|
||||
manage_git_folder,
|
||||
DashboardCommentableObject
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_SDK_FOLDER_RE = re.compile(r"^(sdk/[\w-]+)/(azure[\w-]+)/", re.ASCII)
|
||||
|
||||
_STORAGE_ACCOUNT = "http://azuresdkinfrajobstore1.blob.core.windows.net/azure/azure-sdk-for-python/pullrequests/{prnumber}/dist/{file}"
|
||||
|
||||
def execute_simple_command(cmd_line, cwd=None, shell=False, env=None):
|
||||
try:
|
||||
process = subprocess.Popen(cmd_line,
|
||||
stderr=subprocess.STDOUT,
|
||||
stdout=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
cwd=cwd,
|
||||
shell=shell,
|
||||
env=env)
|
||||
output_buffer = []
|
||||
for line in process.stdout:
|
||||
output_buffer.append(line.rstrip())
|
||||
_LOGGER.info(output_buffer[-1])
|
||||
process.wait()
|
||||
output = "\n".join(output_buffer)
|
||||
if process.returncode:
|
||||
raise subprocess.CalledProcessError(
|
||||
process.returncode,
|
||||
cmd_line,
|
||||
output
|
||||
)
|
||||
return output
|
||||
except Exception as err:
|
||||
_LOGGER.error(err)
|
||||
raise
|
||||
else:
|
||||
_LOGGER.info("Return code: %s", process.returncode)
|
||||
|
||||
def get_package_names(sdk_pr):
|
||||
files = [one_file.filename for one_file in sdk_pr.get_files() if one_file.status not in ['removed']]
|
||||
# "get_files" of Github only download the first 300 files. Might not be enough.
|
||||
package_names = {('.', f.split('/')[0]) for f in files if f.startswith("azure")}
|
||||
# Handle the SDK folder as well
|
||||
matches = {_SDK_FOLDER_RE.search(f) for f in files}
|
||||
package_names.update({match.groups() for match in matches if match is not None})
|
||||
return package_names
|
||||
|
||||
def build_package_from_pr_number(gh_token, sdk_id, pr_number, output_folder, *, with_comment=False):
|
||||
"""Will clone the given PR branch and vuild the package with the given name."""
|
||||
|
||||
con = Github(gh_token)
|
||||
repo = con.get_repo(sdk_id)
|
||||
sdk_pr = repo.get_pull(pr_number)
|
||||
package_names = get_package_names(sdk_pr)
|
||||
|
||||
absolute_output_folder = Path(output_folder).resolve()
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir, \
|
||||
manage_git_folder(gh_token, Path(temp_dir) / Path("sdk"), sdk_id, pr_number=pr_number) as sdk_repo_root:
|
||||
|
||||
for _, package_name in package_names:
|
||||
_LOGGER.debug("Build {}".format(package_name))
|
||||
execute_simple_command(
|
||||
["python", "./build_package.py", "--dest", str(absolute_output_folder), package_name],
|
||||
cwd=sdk_repo_root
|
||||
)
|
||||
_LOGGER.debug("Build finished: {}".format(package_name))
|
||||
|
||||
if with_comment:
|
||||
files = [f.name for f in absolute_output_folder.iterdir()]
|
||||
comment_message = None
|
||||
dashboard = DashboardCommentableObject(sdk_pr, "(message created by the CI based on PR content)")
|
||||
try:
|
||||
installation_message = build_installation_message(sdk_pr)
|
||||
download_message = build_download_message(sdk_pr, files)
|
||||
comment_message = installation_message + "\n\n" + download_message
|
||||
dashboard.create_comment(comment_message)
|
||||
except Exception:
|
||||
_LOGGER.critical("Unable to do PR comment:\n%s", comment_message)
|
||||
|
||||
def build_download_message(sdk_pr, files):
|
||||
if not files:
|
||||
return ""
|
||||
message = "# Direct download\n\nYour files can be directly downloaded here:\n\n"
|
||||
for filename in files:
|
||||
message += "- [{}]({})\n".format(
|
||||
filename,
|
||||
_STORAGE_ACCOUNT.format(prnumber=sdk_pr.number, file=filename)
|
||||
)
|
||||
return message
|
||||
|
||||
def build_installation_message(sdk_pr):
|
||||
package_names = get_package_names(sdk_pr)
|
||||
|
||||
result = ["# Installation instruction"]
|
||||
for _, package in package_names:
|
||||
result.append("## Package {}".format(package))
|
||||
result.append(pr_message_for_package(sdk_pr, package))
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
def pr_message_for_package(sdk_pr, package_name):
|
||||
git_path = '"git+{}@{}#egg={}&subdirectory={}"'.format(
|
||||
sdk_pr.head.repo.html_url,
|
||||
sdk_pr.head.ref,
|
||||
package_name,
|
||||
package_name
|
||||
)
|
||||
|
||||
pip_install = 'pip install {}'
|
||||
pip_wheel = 'pip wheel --no-deps {}'
|
||||
|
||||
pr_body = "You can install the package `{}` of this PR using the following command:\n\t`{}`".format(
|
||||
package_name,
|
||||
pip_install.format(git_path)
|
||||
)
|
||||
|
||||
pr_body += "\n\n"
|
||||
|
||||
pr_body += "You can build a wheel to distribute for test using the following command:\n\t`{}`".format(
|
||||
pip_wheel.format(git_path)
|
||||
)
|
||||
|
||||
pr_body += "\n\n"
|
||||
pr_body += "If you have a local clone of this repository, you can also do:\n\n"
|
||||
pr_body += "- `git checkout {}`\n".format(sdk_pr.head.ref)
|
||||
pr_body += "- `pip install -e ./{}`\n".format(package_name)
|
||||
pr_body += "\n\n"
|
||||
pr_body += "Or build a wheel file to distribute for testing:\n\n"
|
||||
pr_body += "- `git checkout {}`\n".format(sdk_pr.head.ref)
|
||||
pr_body += "- `pip wheel --no-deps ./{}`\n".format(package_name)
|
||||
return pr_body
|
|
@ -0,0 +1,50 @@
|
|||
import argparse
|
||||
import logging
|
||||
import os
|
||||
|
||||
from packaging_tools.drop_tools import build_package_from_pr_number
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
def generate_main():
|
||||
"""Main method"""
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Build package.',
|
||||
formatter_class=argparse.RawTextHelpFormatter)
|
||||
parser.add_argument('--pr-number', '-p',
|
||||
dest='pr_number', type=int, required=True,
|
||||
help='PR number')
|
||||
parser.add_argument('--repo', '-r',
|
||||
dest='repo_id', default="Azure/azure-sdk-for-python",
|
||||
help='Repo id. [default: %(default)s]')
|
||||
parser.add_argument("--with-comment",
|
||||
dest="with_comment", action="store_true",
|
||||
help="Do a comment to the original PR with info.")
|
||||
parser.add_argument("-v", "--verbose",
|
||||
dest="verbose", action="store_true",
|
||||
help="Verbosity in INFO mode")
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
|
||||
parser.add_argument('--output-folder', '-o',
|
||||
dest='output_folder', default='.',
|
||||
help='Output folder for package. [default: %(default)s]')
|
||||
|
||||
args = parser.parse_args()
|
||||
main_logger = logging.getLogger()
|
||||
if args.verbose or args.debug:
|
||||
logging.basicConfig()
|
||||
main_logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
build_package_from_pr_number(
|
||||
os.environ.get("GH_TOKEN", None),
|
||||
args.repo_id,
|
||||
args.pr_number,
|
||||
args.output_folder,
|
||||
with_comment=args.with_comment
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
|
||||
generate_main()
|
|
@ -0,0 +1,133 @@
|
|||
import argparse
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
|
||||
from .swaggertosdk.SwaggerToSdkNewCLI import (
|
||||
build_project,
|
||||
)
|
||||
from .swaggertosdk.SwaggerToSdkCore import (
|
||||
CONFIG_FILE,
|
||||
read_config,
|
||||
solve_relative_path,
|
||||
extract_conf_from_readmes,
|
||||
get_input_paths,
|
||||
get_repo_tag_meta,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate(config_path, sdk_folder, project_pattern, readme, restapi_git_folder, autorest_bin=None, force_generation=False):
|
||||
|
||||
sdk_folder = Path(sdk_folder).expanduser()
|
||||
config = read_config(sdk_folder, config_path)
|
||||
|
||||
global_conf = config["meta"]
|
||||
repotag = get_repo_tag_meta(global_conf)
|
||||
global_conf["autorest_options"] = solve_relative_path(global_conf.get("autorest_options", {}), sdk_folder)
|
||||
global_conf["envs"] = solve_relative_path(global_conf.get("envs", {}), sdk_folder)
|
||||
global_conf["advanced_options"] = solve_relative_path(global_conf.get("advanced_options", {}), sdk_folder)
|
||||
if restapi_git_folder:
|
||||
restapi_git_folder = Path(restapi_git_folder).expanduser()
|
||||
|
||||
# Look for configuration in Readme
|
||||
if readme:
|
||||
swagger_files_in_pr = [readme]
|
||||
else:
|
||||
if not restapi_git_folder:
|
||||
raise ValueError("RestAPI folder must be set if you don't provide a readme.")
|
||||
swagger_files_in_pr = list(restapi_git_folder.glob('specification/**/readme.md'))
|
||||
_LOGGER.info(f"Readme files: {swagger_files_in_pr}")
|
||||
extract_conf_from_readmes(swagger_files_in_pr, restapi_git_folder, repotag, config, force_generation=force_generation)
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
for project, local_conf in config.get("projects", {}).items():
|
||||
if readme:
|
||||
if str(readme) not in project:
|
||||
_LOGGER.info("Skip project %s (readme was %s)", project, readme)
|
||||
continue
|
||||
else:
|
||||
if project_pattern and not any(p in project for p in project_pattern):
|
||||
_LOGGER.info("Skip project %s", project)
|
||||
continue
|
||||
local_conf["autorest_options"] = solve_relative_path(local_conf.get("autorest_options", {}), sdk_folder)
|
||||
|
||||
if readme and readme.startswith("http"):
|
||||
# Simplify here, do not support anything else than Readme.md
|
||||
absolute_markdown_path = readme
|
||||
_LOGGER.info(f"HTTP Markdown input: {absolute_markdown_path}")
|
||||
else:
|
||||
markdown_relative_path, optional_relative_paths = get_input_paths(global_conf, local_conf)
|
||||
|
||||
_LOGGER.info(f"Markdown input: {markdown_relative_path}")
|
||||
_LOGGER.info(f"Optional inputs: {optional_relative_paths}")
|
||||
|
||||
absolute_markdown_path = None
|
||||
if markdown_relative_path:
|
||||
absolute_markdown_path = Path(restapi_git_folder or "", markdown_relative_path).resolve()
|
||||
if optional_relative_paths:
|
||||
local_conf.setdefault('autorest_options', {})['input-file'] = [
|
||||
Path(restapi_git_folder or "", input_path).resolve()
|
||||
for input_path
|
||||
in optional_relative_paths
|
||||
]
|
||||
|
||||
build_project(
|
||||
temp_dir,
|
||||
project,
|
||||
absolute_markdown_path,
|
||||
sdk_folder,
|
||||
global_conf,
|
||||
local_conf,
|
||||
autorest_bin
|
||||
)
|
||||
|
||||
|
||||
def generate_main():
|
||||
"""Main method"""
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Build SDK using Autorest, offline version.',
|
||||
formatter_class=argparse.RawTextHelpFormatter)
|
||||
parser.add_argument('--rest-folder', '-r',
|
||||
dest='restapi_git_folder', default=None,
|
||||
help='Rest API git folder. [default: %(default)s]')
|
||||
parser.add_argument('--project', '-p',
|
||||
dest='project', action='append',
|
||||
help='Select a specific project. Do all by default. You can use a substring for several projects.')
|
||||
parser.add_argument('--readme', '-m',
|
||||
dest='readme',
|
||||
help='Select a specific readme. Must be a path')
|
||||
parser.add_argument('--config', '-c',
|
||||
dest='config_path', default=CONFIG_FILE,
|
||||
help='The JSON configuration format path [default: %(default)s]')
|
||||
parser.add_argument('--autorest',
|
||||
dest='autorest_bin',
|
||||
help='Force the Autorest to be executed. Must be a executable command.')
|
||||
parser.add_argument("-v", "--verbose",
|
||||
dest="verbose", action="store_true",
|
||||
help="Verbosity in INFO mode")
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
|
||||
parser.add_argument('--sdk-folder', '-s',
|
||||
dest='sdk_folder', default='.',
|
||||
help='A Python SDK folder. [default: %(default)s]')
|
||||
|
||||
args = parser.parse_args()
|
||||
main_logger = logging.getLogger()
|
||||
if args.verbose or args.debug:
|
||||
logging.basicConfig()
|
||||
main_logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
generate(args.config_path,
|
||||
args.sdk_folder,
|
||||
args.project,
|
||||
args.readme,
|
||||
args.restapi_git_folder,
|
||||
args.autorest_bin)
|
||||
|
||||
if __name__ == "__main__":
|
||||
generate_main()
|
|
@ -0,0 +1,289 @@
|
|||
"""SwaggerToSdk core tools.
|
||||
"""
|
||||
from enum import Enum, unique
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import requests
|
||||
|
||||
from github import Github, UnknownObjectException
|
||||
|
||||
from .autorest_tools import (
|
||||
autorest_latest_version_finder,
|
||||
autorest_bootstrap_version_finder,
|
||||
autorest_swagger_to_sdk_conf,
|
||||
)
|
||||
from azure_devtools.ci_tools.github_tools import (
|
||||
get_files,
|
||||
GithubLink
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_FILE = 'swagger_to_sdk_config.json'
|
||||
|
||||
DEFAULT_COMMIT_MESSAGE = 'Generated from {hexsha}'
|
||||
|
||||
|
||||
def build_file_content():
|
||||
autorest_version = autorest_latest_version_finder()
|
||||
autorest_bootstrap_version = autorest_bootstrap_version_finder()
|
||||
return {
|
||||
'autorest': autorest_version,
|
||||
'autorest_bootstrap': autorest_bootstrap_version,
|
||||
}
|
||||
|
||||
|
||||
def get_repo_tag_meta(meta_conf):
|
||||
repotag = meta_conf.get("repotag")
|
||||
if repotag:
|
||||
return repotag
|
||||
# Guess for now, "repotag" should be added everywhere
|
||||
if "go" in meta_conf["autorest_options"]:
|
||||
return "azure-sdk-for-go"
|
||||
if "ruby" in meta_conf["autorest_options"]:
|
||||
return "azure-sdk-for-ruby"
|
||||
if "java" in meta_conf["autorest_options"]:
|
||||
return "azure-sdk-for-java"
|
||||
if "nodejs" in meta_conf["autorest_options"]:
|
||||
return "azure-sdk-for-node"
|
||||
if "typescript" in meta_conf["autorest_options"]:
|
||||
return "azure-sdk-for-js"
|
||||
raise ValueError("No repotag found or infered")
|
||||
|
||||
|
||||
@unique
|
||||
class Language(str, Enum):
|
||||
GOLANG = "go"
|
||||
RUBY = "ruby"
|
||||
JAVA = "java"
|
||||
NODEJS = "nodejs"
|
||||
CSHARP = "csharp"
|
||||
PYTHON = "python"
|
||||
TYPESCRIPT = "typescript"
|
||||
|
||||
|
||||
def get_language_from_conf(meta_conf):
|
||||
"""Detect the language based on the default Autorest options.
|
||||
Assuming all languages use --mylanguage in the config file.
|
||||
If I don't find anything, well just say I don't know...
|
||||
|
||||
This is based on autorest language flags.
|
||||
:rtype: Language
|
||||
"""
|
||||
autorest_options_lang = set(meta_conf["autorest_options"].keys())
|
||||
languages = set()
|
||||
for value in Language:
|
||||
if value in autorest_options_lang:
|
||||
languages.add(value)
|
||||
|
||||
if not languages:
|
||||
_LOGGER.warning("No detected language from this conf")
|
||||
return None # I don't know what this conf is about
|
||||
|
||||
language = languages.pop()
|
||||
if languages:
|
||||
_LOGGER.warning("This SwaggerToSdk conf seems to generate too much language in one call, assume we don't know")
|
||||
return None
|
||||
|
||||
return language
|
||||
|
||||
|
||||
def get_context_tag_from_git_object(git_object):
|
||||
files_list = [file.filename for file in get_files(git_object)]
|
||||
return get_context_tag_from_file_list(files_list)
|
||||
|
||||
|
||||
def get_context_tag_from_file_list(files_list):
|
||||
context_tags = set()
|
||||
for filename in files_list:
|
||||
filepath = Path(filename)
|
||||
filename = filepath.as_posix()
|
||||
if "/examples/" in filename:
|
||||
# Do not compute context for examples, which are not used in the SDK
|
||||
continue
|
||||
# Match if RP name
|
||||
match = re.match(r"specification/(.*)/Microsoft.\w*/(stable|preview)/", filename, re.I)
|
||||
if match:
|
||||
context_tags.add(match.groups()[0])
|
||||
continue
|
||||
# Match if stable/preview but not RP like ARM (i.e. Cognitive Services)
|
||||
match = re.match(r"specification/(.*)/(stable|preview)/", filename, re.I)
|
||||
if match:
|
||||
context_tags.add(match.groups()[0])
|
||||
continue
|
||||
# Match Readme
|
||||
# Do this as the last step, to cope with weird Readmes (e.g. ServiceFabric)...
|
||||
match = re.match(r"specification/(.*)/readme.\w*.?md", filename, re.I)
|
||||
if match:
|
||||
context_tags.add(match.groups()[0])
|
||||
continue
|
||||
# No context-tags
|
||||
return context_tags
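# Hypothetical example (path not from the original source): a PR touching
# "specification/compute/Microsoft.Compute/stable/2020-06-01/compute.json"
# produces the context tag "compute".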
|
||||
|
||||
|
||||
def this_conf_will_generate_for_this_pr(git_object, config):
|
||||
"""Try to guess if this PR has a chance to generate something for this conf.
|
||||
|
||||
Right now, just match the language in the conf with the presence
|
||||
of ONLY "readme.language.md" files.
|
||||
"""
|
||||
lang = get_language_from_conf(config)
|
||||
filenames = [file.filename.lower() for file in get_files(git_object)]
|
||||
readme_lang = [name for name in filenames if re.match(r"(.*)readme.\w+.md", name)]
|
||||
|
||||
if len(readme_lang) != len(filenames):
|
||||
return True # This means there are files that are not language-specific readmes
|
||||
|
||||
return bool([name for name in readme_lang if name.endswith("readme.{}.md".format(lang))])
|
||||
|
||||
|
||||
def get_readme_files_from_git_object(git_object, base_dir=Path('.')):
|
||||
files_list = [file.filename for file in get_files(git_object)]
|
||||
return get_readme_files_from_file_list(files_list, base_dir)
|
||||
|
||||
|
||||
def get_readme_files_from_file_list(files_list, base_dir=Path('.')):
|
||||
"""Get readme files from this PR.
|
||||
The algorithm is to look for the context, and then search for a Readme inside this context.
|
||||
"""
|
||||
readme_files = set()
|
||||
context_tags = get_context_tag_from_file_list(files_list)
|
||||
for context_tag in context_tags:
|
||||
expected_folder = Path(base_dir) / Path("specification/{}".format(context_tag))
|
||||
if not expected_folder.is_dir():
|
||||
_LOGGER.warning("From context {} I didn't find folder {}".format(
|
||||
context_tag,
|
||||
expected_folder
|
||||
))
|
||||
continue
|
||||
for expected_readme in [l for l in expected_folder.iterdir() if l.is_file()]:
|
||||
# Need to do a case-insensitive test.
|
||||
match = re.match(r"readme.\w*.?md", expected_readme.name, re.I)
|
||||
if match:
|
||||
readme_files.add(expected_readme.relative_to(Path(base_dir)))
|
||||
return readme_files
|
||||
|
||||
|
||||
def read_config(sdk_git_folder, config_file):
|
||||
"""Read the configuration file and return JSON"""
|
||||
config_path = os.path.join(sdk_git_folder, config_file)
|
||||
with open(config_path, 'r') as config_fd:
|
||||
return json.loads(config_fd.read())
|
||||
|
||||
def read_config_from_github(sdk_id, branch="master", gh_token=None):
|
||||
raw_link = str(get_configuration_github_path(sdk_id, branch))
|
||||
_LOGGER.debug("Will try to download: %s", raw_link)
|
||||
_LOGGER.debug("Token is defined: %s", gh_token is not None)
|
||||
headers = {"Authorization": "token {}".format(gh_token)} if gh_token else {}
|
||||
response = requests.get(raw_link, headers=headers)
|
||||
if response.status_code != 200:
|
||||
raise ValueError("Unable to download conf file for SDK {} branch {}: status code {}".format(
|
||||
sdk_id,
|
||||
branch,
|
||||
response.status_code
|
||||
))
|
||||
return json.loads(response.text)
|
||||
|
||||
def extract_conf_from_readmes(swagger_files_in_pr, restapi_git_folder, sdk_git_id, config, force_generation=False):
|
||||
readme_files_in_pr = {readme for readme in swagger_files_in_pr if getattr(readme, "name", readme).lower().endswith("readme.md")}
|
||||
for readme_file in readme_files_in_pr:
|
||||
build_swaggertosdk_conf_from_json_readme(readme_file, sdk_git_id, config, base_folder=restapi_git_folder, force_generation=force_generation)
|
||||
|
||||
def get_readme_path(readme_file, base_folder='.'):
|
||||
"""Get a readable Readme path.
|
||||
|
||||
If it starts with http, assume it is online, ignore base_folder and convert to a raw link if necessary.
|
||||
If base_folder is not None, assume relative to base_folder.
|
||||
"""
|
||||
if not isinstance(readme_file, Path) and readme_file.startswith("http"):
|
||||
return GithubLink.from_string(readme_file).as_raw_link()
|
||||
else:
|
||||
if base_folder is None:
|
||||
base_folder='.'
|
||||
return str(Path(base_folder) / Path(readme_file))
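# Hypothetical examples of the two branches above (values are illustrative only):
#   get_readme_path("specification/testservice/resource-manager/readme.md", "/tmp/rest")
#       -> "/tmp/rest/specification/testservice/resource-manager/readme.md"
#   an "https://github.com/..." readme is converted to a raw link via GithubLink instead.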
|
||||
|
||||
def build_swaggertosdk_conf_from_json_readme(readme_file, sdk_git_id, config, base_folder='.', force_generation=False):
|
||||
"""Get the JSON conf of this README, and create SwaggerToSdk conf.
|
||||
|
||||
Readme path can be any readme syntax accepted by autorest.
|
||||
readme_file will be used as the project key as-is.
|
||||
|
||||
:param str readme_file: A path that Autorest accepts. Raw GH link or absolute path.
|
||||
:param str sdk_git_id: Repo ID. If org/login is provided, it will be stripped.
|
||||
:param dict config: Config where to update the "projects" key.
|
||||
:param bool force_generation: If no Swagger to SDK section is found, force once with the Readme as input
|
||||
"""
|
||||
readme_full_path = get_readme_path(readme_file, base_folder)
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
readme_as_conf = autorest_swagger_to_sdk_conf(
|
||||
readme_full_path,
|
||||
temp_dir
|
||||
)
|
||||
generated_config = {
|
||||
"markdown": readme_full_path,
|
||||
}
|
||||
sdk_git_short_id = sdk_git_id.split("/")[-1].lower()
|
||||
_LOGGER.info("Looking for tag {} in readme {}".format(sdk_git_short_id, readme_file))
|
||||
for swagger_to_sdk_conf in readme_as_conf:
|
||||
repo = swagger_to_sdk_conf.get("repo", "")
|
||||
repo = repo.split("/")[-1].lower() # Be sure there is no org/login part
|
||||
if repo == sdk_git_short_id:
|
||||
_LOGGER.info("This Readme contains a swagger-to-sdk section for repo {}".format(repo))
|
||||
generated_config.update({
|
||||
"autorest_options": swagger_to_sdk_conf.get("autorest_options", {}),
|
||||
"after_scripts": swagger_to_sdk_conf.get("after_scripts", []),
|
||||
})
|
||||
config.setdefault("projects", {})[str(readme_file)] = generated_config
|
||||
return generated_config
|
||||
else:
|
||||
_LOGGER.info("Skip mismatch {} from {}".format(repo, sdk_git_short_id))
|
||||
if not force_generation:
|
||||
_LOGGER.info("Didn't find tag {} in readme {}. Did you forget to update the SwaggerToSdk section?".format(sdk_git_short_id, readme_file))
|
||||
else:
|
||||
_LOGGER.info("Didn't find tag {} in readme {}. Forcing it.".format(sdk_git_short_id, readme_file))
|
||||
config.setdefault("projects", {})[str(readme_file)] = generated_config
|
||||
|
||||
def get_input_paths(global_conf, local_conf):
|
||||
"""Returns a 2-tuple:
|
||||
- Markdown Path or None
|
||||
- Input-file Paths or empty list
|
||||
"""
|
||||
del global_conf # Unused
|
||||
|
||||
relative_markdown_path = None # Markdown is optional
|
||||
input_files = [] # Input file could be empty
|
||||
if "markdown" in local_conf:
|
||||
relative_markdown_path = Path(local_conf['markdown'])
|
||||
input_files = local_conf.get('autorest_options', {}).get('input-file', [])
|
||||
if input_files and not isinstance(input_files, list):
|
||||
input_files = [input_files]
|
||||
input_files = [Path(input_file) for input_file in input_files]
|
||||
if not relative_markdown_path and not input_files:
|
||||
raise ValueError("No input file found")
|
||||
return (relative_markdown_path, input_files)
|
||||
|
||||
|
||||
def solve_relative_path(autorest_options, sdk_root):
|
||||
"""Solve relative path in conf.
|
||||
|
||||
If a key is prefixed by "sdkrel:", it's solved against SDK root.
|
||||
"""
|
||||
SDKRELKEY = "sdkrel:"
|
||||
solved_autorest_options = {}
|
||||
for key, value in autorest_options.items():
|
||||
if key.startswith(SDKRELKEY):
|
||||
_LOGGER.debug("Found a sdkrel pair: %s/%s", key, value)
|
||||
subkey = key[len(SDKRELKEY):]
|
||||
solved_value = Path(sdk_root, value).resolve()
|
||||
solved_autorest_options[subkey] = str(solved_value)
|
||||
else:
|
||||
solved_autorest_options[key] = value
|
||||
return solved_autorest_options
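# Hypothetical example (values are illustrative only): with sdk_root="/home/user/azure-sdk-for-python",
#   {"sdkrel:output-folder": "sdk/test", "namespace": "azure.mgmt.test"}
# becomes
#   {"output-folder": "/home/user/azure-sdk-for-python/sdk/test", "namespace": "azure.mgmt.test"}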
|
||||
|
||||
def get_configuration_github_path(sdk_id, branch="master"):
|
||||
return GithubLink(sdk_id, "raw", branch, CONFIG_FILE)
|
|
@ -0,0 +1,311 @@
|
|||
"""Swagger to SDK"""
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
import json
|
||||
from pathlib import Path
|
||||
import tempfile
|
||||
|
||||
from git import Repo, GitCommandError
|
||||
|
||||
from .SwaggerToSdkCore import (
|
||||
read_config_from_github,
|
||||
DEFAULT_COMMIT_MESSAGE,
|
||||
get_input_paths,
|
||||
extract_conf_from_readmes,
|
||||
get_readme_files_from_git_object,
|
||||
build_file_content,
|
||||
solve_relative_path,
|
||||
this_conf_will_generate_for_this_pr
|
||||
)
|
||||
from .autorest_tools import (
|
||||
execute_simple_command,
|
||||
generate_code,
|
||||
merge_options,
|
||||
)
|
||||
from azure_devtools.ci_tools.git_tools import (
|
||||
checkout_and_create_branch,
|
||||
do_commit,
|
||||
)
|
||||
from azure_devtools.ci_tools.github_tools import (
|
||||
configure_user,
|
||||
manage_git_folder,
|
||||
)
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def move_wrapper_files_or_dirs(src_root, dst_root, global_conf, local_conf):
|
||||
"""Save wrapper files somewhere for replace them after generation.
|
||||
"""
|
||||
src_relative_path = local_conf.get('output_dir', '')
|
||||
src_abs_path = Path(src_root, src_relative_path)
|
||||
dst_abs_path = Path(dst_root, src_relative_path)
|
||||
|
||||
wrapper_files_or_dirs = merge_options(global_conf, local_conf, "wrapper_filesOrDirs") or []
|
||||
|
||||
for wrapper_file_or_dir in wrapper_files_or_dirs:
|
||||
for file_path in src_abs_path.glob(wrapper_file_or_dir):
|
||||
relative_file_path = file_path.relative_to(src_abs_path)
|
||||
file_path_dest = Path(dst_abs_path, relative_file_path)
|
||||
if file_path.is_file():
|
||||
file_path_dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
_LOGGER.info("Moving %s to %s", str(file_path), str(file_path_dest))
|
||||
# This does not work on Windows if generated and dest are not on the same drive
|
||||
# file_path.replace(file_path_dest)
|
||||
shutil.move(file_path, file_path_dest)
|
||||
|
||||
|
||||
def delete_extra_files(sdk_root, global_conf, local_conf):
|
||||
src_relative_path = local_conf.get('output_dir', '')
|
||||
src_abs_path = Path(sdk_root, src_relative_path)
|
||||
|
||||
delete_files_or_dirs = merge_options(global_conf, local_conf, "delete_filesOrDirs") or []
|
||||
|
||||
for delete_file_or_dir in delete_files_or_dirs:
|
||||
for file_path in src_abs_path.glob(delete_file_or_dir):
|
||||
if file_path.is_file():
|
||||
file_path.unlink()
|
||||
else:
|
||||
shutil.rmtree(str(file_path))
|
||||
|
||||
|
||||
def move_autorest_files(client_generated_path, sdk_root, global_conf, local_conf):
|
||||
"""Update data from generated to final folder.
|
||||
|
||||
This is done only if output_dir is set; otherwise the code is considered generated in place
|
||||
and does not require moving.
|
||||
"""
|
||||
dest = local_conf.get('output_dir', None)
|
||||
if not dest:
|
||||
return
|
||||
destination_folder = get_local_path_dir(sdk_root, dest)
|
||||
|
||||
generated_relative_base_directory = local_conf.get('generated_relative_base_directory') or \
|
||||
global_conf.get('generated_relative_base_directory')
|
||||
|
||||
if generated_relative_base_directory:
|
||||
client_possible_path = [elt for elt in client_generated_path.glob(generated_relative_base_directory) if elt.is_dir()]
|
||||
try:
|
||||
client_generated_path = client_possible_path.pop()
|
||||
except IndexError:
|
||||
err_msg = "Incorrect generated_relative_base_directory folder: {}\n".format(generated_relative_base_directory)
|
||||
err_msg += "Base folders were: : {}\n".format([f.relative_to(client_generated_path) for f in client_generated_path.iterdir()])
|
||||
_LOGGER.critical(err_msg)
|
||||
raise ValueError(err_msg)
|
||||
if client_possible_path:
|
||||
err_msg = "generated_relative_base_directory parameter is ambiguous: {} {}".format(
|
||||
client_generated_path,
|
||||
client_possible_path
|
||||
)
|
||||
_LOGGER.critical(err_msg)
|
||||
raise ValueError(err_msg)
|
||||
|
||||
shutil.rmtree(str(destination_folder))
|
||||
# This does not work on Windows if generated and dest are not on the same drive
|
||||
# client_generated_path.replace(destination_folder)
|
||||
shutil.move(client_generated_path, destination_folder)
|
||||
|
||||
|
||||
def write_build_file(sdk_root, local_conf):
|
||||
build_dir = local_conf.get('build_dir')
|
||||
if build_dir:
|
||||
build_folder = get_local_path_dir(sdk_root, build_dir)
|
||||
build_file = Path(build_folder, "build.json")
|
||||
with open(build_file, 'w') as build_fd:
|
||||
json.dump(build_file_content(), build_fd, indent=2)
|
||||
|
||||
|
||||
def execute_after_script(sdk_root, global_conf, local_conf):
|
||||
after_scripts = merge_options(global_conf, local_conf, "after_scripts", keep_list_order=True) or []
|
||||
local_envs = dict(os.environ)
|
||||
local_envs.update(global_conf.get("envs", {}))
|
||||
|
||||
for script in after_scripts:
|
||||
_LOGGER.info("Execute after script: %s", script)
|
||||
execute_simple_command(script, cwd=sdk_root, shell=True, env=local_envs)
|
||||
|
||||
|
||||
def get_local_path_dir(root, relative_path):
|
||||
build_folder = Path(root, relative_path)
|
||||
if not build_folder.is_dir():
|
||||
err_msg = "Folder does not exist or is not accessible: {}".format(
|
||||
build_folder)
|
||||
_LOGGER.critical(err_msg)
|
||||
raise ValueError(err_msg)
|
||||
return build_folder
|
||||
|
||||
|
||||
def build_project(temp_dir, project, absolute_markdown_path, sdk_folder, global_conf, local_conf, autorest_bin=None):
|
||||
absolute_generated_path = Path(temp_dir, project)
|
||||
absolute_save_path = Path(temp_dir, "save")
|
||||
move_wrapper_files_or_dirs(sdk_folder, absolute_save_path, global_conf, local_conf)
|
||||
generate_code(absolute_markdown_path,
|
||||
global_conf,
|
||||
local_conf,
|
||||
absolute_generated_path if "output_dir" in local_conf else None,
|
||||
autorest_bin)
|
||||
move_autorest_files(absolute_generated_path, sdk_folder, global_conf, local_conf)
|
||||
move_wrapper_files_or_dirs(absolute_save_path, sdk_folder, global_conf, local_conf)
|
||||
delete_extra_files(sdk_folder, global_conf, local_conf)
|
||||
write_build_file(sdk_folder, local_conf)
|
||||
execute_after_script(sdk_folder, global_conf, local_conf)
|
||||
|
||||
|
||||
def build_libraries(config, skip_callback, restapi_git_folder, sdk_repo, temp_dir, autorest_bin=None):
|
||||
"""Main method of the the file"""
|
||||
|
||||
global_conf = config["meta"]
|
||||
global_conf["autorest_options"] = solve_relative_path(global_conf.get("autorest_options", {}), sdk_repo.working_tree_dir)
|
||||
global_conf["envs"] = solve_relative_path(global_conf.get("envs", {}), sdk_repo.working_tree_dir)
|
||||
global_conf["advanced_options"] = solve_relative_path(global_conf.get("advanced_options", {}), sdk_repo.working_tree_dir)
|
||||
|
||||
for project, local_conf in config.get("projects", {}).items():
|
||||
if skip_callback(project, local_conf):
|
||||
_LOGGER.info("Skip project %s", project)
|
||||
continue
|
||||
local_conf["autorest_options"] = solve_relative_path(local_conf.get("autorest_options", {}), sdk_repo.working_tree_dir)
|
||||
|
||||
markdown_relative_path, optional_relative_paths = get_input_paths(global_conf, local_conf)
|
||||
_LOGGER.info(f"Markdown input: {markdown_relative_path}")
|
||||
_LOGGER.info(f"Optional inputs: {optional_relative_paths}")
|
||||
|
||||
absolute_markdown_path = None
|
||||
if markdown_relative_path:
|
||||
absolute_markdown_path = Path(restapi_git_folder, markdown_relative_path).resolve()
|
||||
if optional_relative_paths:
|
||||
local_conf.setdefault('autorest_options', {})['input-file'] = [
|
||||
Path(restapi_git_folder, input_path).resolve()
|
||||
for input_path
|
||||
in optional_relative_paths
|
||||
]
|
||||
|
||||
sdk_folder = sdk_repo.working_tree_dir
|
||||
build_project(
|
||||
temp_dir,
|
||||
project,
|
||||
absolute_markdown_path,
|
||||
sdk_folder,
|
||||
global_conf,
|
||||
local_conf,
|
||||
autorest_bin
|
||||
)
|
||||
|
||||
def generate_sdk_from_git_object(git_object, branch_name, restapi_git_id, sdk_git_id, base_branch_names, *, fallback_base_branch_name="master", sdk_tag=None):
|
||||
"""Generate SDK from a commit or a PR object.
|
||||
|
||||
git_object is the initial commit/PR from the RestAPI repo. If git_object is a PR, prefer to checkout Github PR "merge_commit_sha"
|
||||
restapi_git_id explains where to clone the repo.
|
||||
sdk_git_id explains where to push the commit.
|
||||
sdk_tag explains what is the tag used in the Readme for the swagger-to-sdk section. If not provided, use sdk_git_id.
|
||||
branch_name is the expected branch name in the SDK repo.
|
||||
- If this branch exists, use it.
|
||||
- If not, use the base branch to create that branch (base branch is where I intend to do my PR)
|
||||
- If base_branch_names is not provided, use fallback_base_branch_name as base
|
||||
- If this base branch is provided and does not exist, create it first using fallback_base_branch_name (this one is required to exist)
|
||||
|
||||
WARNING:
|
||||
This method might push to "branch_name" and "base_branch_name". No push will be made to "fallback_base_branch_name"
|
||||
"""
|
||||
gh_token = os.environ["GH_TOKEN"]
|
||||
message_template = DEFAULT_COMMIT_MESSAGE
|
||||
autorest_bin = None
|
||||
if sdk_tag is None:
|
||||
sdk_tag = sdk_git_id
|
||||
|
||||
try: # Checkout the sha if commit obj
|
||||
branched_rest_api_id = restapi_git_id+'@'+git_object.sha
|
||||
pr_number = None
|
||||
except (AttributeError, TypeError): # This is a PR, don't clone the fork but "base" repo and PR magic commit
|
||||
if git_object.merge_commit_sha:
|
||||
branched_rest_api_id = git_object.base.repo.full_name+'@'+git_object.merge_commit_sha
|
||||
else:
|
||||
branched_rest_api_id = git_object.base.repo.full_name
|
||||
pr_number = git_object.number
|
||||
|
||||
# Always clone SDK from fallback branch that is required to exist
|
||||
branched_sdk_git_id = sdk_git_id+'@'+fallback_base_branch_name
|
||||
|
||||
# I don't know if the destination branch exists, try until it works
|
||||
config = None
|
||||
branch_list = base_branch_names + [branch_name] + [fallback_base_branch_name]
|
||||
for branch in branch_list:
|
||||
try:
|
||||
config = read_config_from_github(sdk_git_id, branch, gh_token)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
break
|
||||
if config is None:
|
||||
raise ValueError("Unable to locate configuration in {}".format(branch_list))
|
||||
global_conf = config["meta"]
|
||||
|
||||
# If PR is only about a language that this conf can't handle, skip fast
|
||||
if not this_conf_will_generate_for_this_pr(git_object, global_conf):
|
||||
_LOGGER.info("Skipping this job based on conf not impacted by Git object")
|
||||
return
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
|
||||
clone_dir = Path(temp_dir) / Path(global_conf.get("advanced_options", {}).get("clone_dir", "sdk"))
|
||||
_LOGGER.info("Clone dir will be: %s", clone_dir)
|
||||
|
||||
with manage_git_folder(gh_token, Path(temp_dir) / Path("rest"), branched_rest_api_id, pr_number=pr_number) as restapi_git_folder, \
|
||||
manage_git_folder(gh_token, clone_dir, branched_sdk_git_id) as sdk_folder:
|
||||
|
||||
readme_files_infered = get_readme_files_from_git_object(git_object, restapi_git_folder)
|
||||
_LOGGER.info("Readmes files infered from PR: %s ", readme_files_infered)
|
||||
if not readme_files_infered:
|
||||
_LOGGER.info("No Readme in PR, quit")
|
||||
return
|
||||
|
||||
# SDK part
|
||||
sdk_repo = Repo(str(sdk_folder))
|
||||
|
||||
for base_branch in base_branch_names:
|
||||
_LOGGER.info('Checkout and create %s', base_branch)
|
||||
checkout_and_create_branch(sdk_repo, base_branch)
|
||||
|
||||
_LOGGER.info('Try to checkout destination branch %s', branch_name)
|
||||
try:
|
||||
sdk_repo.git.checkout(branch_name)
|
||||
_LOGGER.info('The branch exists.')
|
||||
except GitCommandError:
|
||||
_LOGGER.info('Destination branch does not exist')
|
||||
# Will be created by do_commit
|
||||
|
||||
configure_user(gh_token, sdk_repo)
|
||||
|
||||
# Look for configuration in Readme
|
||||
_LOGGER.info('Extract conf from Readmes for target: %s', sdk_git_id)
|
||||
extract_conf_from_readmes(readme_files_infered, restapi_git_folder, sdk_tag, config)
|
||||
_LOGGER.info('End of extraction')
|
||||
|
||||
def skip_callback(project, local_conf):
|
||||
# We know "project" is based on Path in "readme_files_infered"
|
||||
if Path(project) in readme_files_infered:
|
||||
return False
|
||||
# Might be a regular project
|
||||
markdown_relative_path, optional_relative_paths = get_input_paths(global_conf, local_conf)
|
||||
if not (
|
||||
markdown_relative_path in readme_files_infered or
|
||||
any(input_file in readme_files_infered for input_file in optional_relative_paths)):
|
||||
_LOGGER.info(f"In project {project} no files involved in this commit")
|
||||
return True
|
||||
return False
|
||||
|
||||
build_libraries(config, skip_callback, restapi_git_folder,
|
||||
sdk_repo, temp_dir, autorest_bin)
|
||||
|
||||
try:
|
||||
commit_for_sha = git_object.commit # Commit
|
||||
except AttributeError:
|
||||
commit_for_sha = list(git_object.get_commits())[-1].commit # PR
|
||||
message = message_template + "\n\n" + commit_for_sha.message
|
||||
commit_sha = do_commit(sdk_repo, message, branch_name, commit_for_sha.sha)
|
||||
if commit_sha:
|
||||
for base_branch in base_branch_names:
|
||||
sdk_repo.git.push('origin', base_branch, set_upstream=True)
|
||||
sdk_repo.git.push('origin', branch_name, set_upstream=True)
|
||||
return "https://github.com/{}/commit/{}".format(sdk_git_id, commit_sha)
|
|
@ -0,0 +1,150 @@
|
|||
import json
|
||||
import logging
|
||||
import os.path
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def autorest_latest_version_finder():
|
||||
autorest_bin = shutil.which("autorest")
|
||||
cmd_line = "{} --version --json".format(autorest_bin)
|
||||
return json.loads(subprocess.check_output(cmd_line.split()).decode().strip())
|
||||
|
||||
|
||||
def autorest_swagger_to_sdk_conf(readme, output_folder):
|
||||
_LOGGER.info("Looking for swagger-to-sdk section in {}".format(readme))
|
||||
autorest_bin = shutil.which("autorest")
|
||||
# --input-file=foo is to workaround a bug where the command is not executed at all if no input-file is found (even if we don't care about input-file here)
|
||||
cmd_line = "{} {} --perform-load=false --swagger-to-sdk --output-artifact=configuration.json --input-file=foo --output-folder={}".format(
|
||||
autorest_bin,
|
||||
str(readme),
|
||||
str(output_folder)
|
||||
)
|
||||
execute_simple_command(cmd_line.split())
|
||||
conf_path = Path(output_folder, "configuration.json")
|
||||
with conf_path.open() as fd:
|
||||
conf_as_json = json.load(fd)
|
||||
swagger_to_sdk_conf = [c for c in conf_as_json.get("swagger-to-sdk", []) if c]
|
||||
return swagger_to_sdk_conf
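# A returned entry typically looks like (hypothetical content, for illustration only):
# {"repo": "azure-sdk-for-python", "autorest_options": {"python": ""}, "after_scripts": ["..."]}
# i.e. one dict per repo listed in the Readme's "swagger-to-sdk" section.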
|
||||
|
||||
|
||||
def autorest_bootstrap_version_finder():
|
||||
try:
|
||||
npm_bin = shutil.which('npm')
|
||||
cmd_line = ("{} --json ls autorest -g".format(npm_bin)).split()
|
||||
return json.loads(subprocess.check_output(cmd_line).decode().strip())
|
||||
except Exception:
|
||||
return {}
|
||||
|
||||
|
||||
def merge_options(global_conf, local_conf, key, *, keep_list_order=False):
|
||||
"""Merge the conf using override: local conf is prioritary over global.
|
||||
|
||||
If keep_list_order is True, lists are merged global+local. The result might contain duplicates.
|
||||
If False, duplicates are removed.
|
||||
"""
|
||||
global_keyed_conf = global_conf.get(key) # Could be None
|
||||
local_keyed_conf = local_conf.get(key) # Could be None
|
||||
|
||||
if global_keyed_conf is None or local_keyed_conf is None:
|
||||
return global_keyed_conf or local_keyed_conf
|
||||
|
||||
if isinstance(global_keyed_conf, list):
|
||||
if keep_list_order:
|
||||
options = list(global_keyed_conf)
|
||||
options += local_keyed_conf
|
||||
return options
|
||||
options = set(global_keyed_conf)
|
||||
else:
|
||||
options = dict(global_keyed_conf)
|
||||
|
||||
options.update(local_keyed_conf)
|
||||
return options
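# Hypothetical examples (illustrative values only):
#   dicts: global {"a": 1, "b": 2} + local {"b": 3} -> {"a": 1, "b": 3}
#   lists: global ["x"] + local ["y"] -> {"x", "y"} (a set, order not preserved)
#          with keep_list_order=True -> ["x", "y"]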
|
||||
|
||||
|
||||
def build_autorest_options(global_conf, local_conf):
|
||||
"""Build the string of the Autorest options"""
|
||||
merged_options = merge_options(global_conf, local_conf, "autorest_options") or {}
|
||||
def value(x):
|
||||
escaped = x if " " not in x else "'"+x+"'"
|
||||
return "={}".format(escaped) if escaped else ""
|
||||
listify = lambda x: x if isinstance(x, list) else [x]
|
||||
|
||||
sorted_keys = sorted(list(merged_options.keys())) # To be honest, just to help for tests...
|
||||
return [
|
||||
"--{}{}".format(key.lower(), value(str(option)))
|
||||
for key in sorted_keys
|
||||
for option in listify(merged_options[key])
|
||||
]
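# Hypothetical example (illustrative values only):
#   merged options {"python": "", "output-folder": "/tmp/out", "tag": ["a", "b"]}
#   -> ["--output-folder=/tmp/out", "--python", "--tag=a", "--tag=b"]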
|
||||
|
||||
def generate_code(input_file, global_conf, local_conf, output_dir=None, autorest_bin=None):
|
||||
"""Call the Autorest process with the given parameters.
|
||||
|
||||
Input file can be a Path instance, a str (will be cast to Path), or a str starting with
|
||||
http (will be passed to Autorest as is).
|
||||
"""
|
||||
if not autorest_bin:
|
||||
autorest_bin = shutil.which("autorest")
|
||||
if not autorest_bin:
|
||||
raise ValueError("No autorest found in PATH and no autorest path option used")
|
||||
|
||||
params = [str(input_file)] if input_file else []
|
||||
if output_dir: # For legacy. Define "output-folder" in "autorest_options" now
|
||||
params.append("--output-folder={}".format(str(output_dir)+os.path.sep))
|
||||
params += build_autorest_options(global_conf, local_conf)
|
||||
|
||||
input_files = local_conf.get("autorest_options", {}).get("input-file", [])
|
||||
|
||||
if not input_file and not input_files:
|
||||
raise ValueError("I don't have input files!")
|
||||
|
||||
path_input_files = [pit for pit in input_files if isinstance(pit, Path)]
|
||||
if input_file and isinstance(input_file, Path):
|
||||
input_path = input_file.parent
|
||||
elif path_input_files:
|
||||
input_path = path_input_files[0].parent
|
||||
else:
|
||||
input_path = Path(".")
|
||||
|
||||
cmd_line = autorest_bin.split()
|
||||
cmd_line += params
|
||||
_LOGGER.info("Autorest cmd line:\n%s", " ".join(cmd_line))
|
||||
|
||||
execute_simple_command(cmd_line, cwd=str(input_path))
|
||||
# Checks that Autorest did something if output_dir is under control
|
||||
# Note that this can fail if "--output-folder" was overidden by the Readme.
|
||||
if output_dir and (not output_dir.is_dir() or next(output_dir.iterdir(), None) is None):
|
||||
raise ValueError("Autorest call ended with 0, but no files were generated")
|
||||
|
||||
|
||||
def execute_simple_command(cmd_line, cwd=None, shell=False, env=None):
|
||||
try:
|
||||
process = subprocess.Popen(cmd_line,
|
||||
stderr=subprocess.STDOUT,
|
||||
stdout=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
cwd=cwd,
|
||||
shell=shell,
|
||||
env=env)
|
||||
output_buffer = []
|
||||
for line in process.stdout:
|
||||
output_buffer.append(line.rstrip())
|
||||
_LOGGER.info(output_buffer[-1])
|
||||
process.wait()
|
||||
output = "\n".join(output_buffer)
|
||||
if process.returncode:
|
||||
raise subprocess.CalledProcessError(
|
||||
process.returncode,
|
||||
cmd_line,
|
||||
output
|
||||
)
|
||||
|
||||
except Exception as err:
|
||||
_LOGGER.error(err)
|
||||
raise
|
||||
else:
|
||||
_LOGGER.info("Return code: %s", process.returncode)
|
|
@ -0,0 +1,6 @@
|
|||
recursive-include tests *.py *.yaml
|
||||
include *.md
|
||||
{%- for init_name in init_names %}
|
||||
include {{ init_name }}
|
||||
{%- endfor %}
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
# Microsoft Azure SDK for Python
|
||||
|
||||
This is the Microsoft Azure {{package_pprint_name}} Client Library.
|
||||
This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8.
|
||||
For a more complete view of Azure libraries, see the [Github repo](https://github.com/Azure/azure-sdk-for-python/)
|
||||
|
||||
|
||||
# Usage
|
||||
|
||||
For code examples, see [{{package_pprint_name}}](https://docs.microsoft.com/python/api/overview/azure/{{package_doc_id}})
|
||||
on docs.microsoft.com.
|
||||
|
||||
|
||||
# Provide Feedback
|
||||
|
||||
If you encounter any bugs or have suggestions, please file an issue in the
|
||||
[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
|
||||
section of the project.
|
||||
|
||||
|
||||
![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2F{{package_name}}%2FREADME.png)
|
|
@ -0,0 +1 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
|
|
@ -0,0 +1,2 @@
|
|||
[bdist_wheel]
|
||||
universal=1
|
|
@ -0,0 +1,96 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
#-------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See License.txt in the project root for
|
||||
# license information.
|
||||
#--------------------------------------------------------------------------
|
||||
|
||||
import re
|
||||
import os.path
|
||||
from io import open
|
||||
from setuptools import find_packages, setup
|
||||
|
||||
# Change PACKAGE_NAME only, to switch to a different folder and package name
|
||||
PACKAGE_NAME = "{{package_name}}"
|
||||
PACKAGE_PPRINT_NAME = "{{package_pprint_name}}"
|
||||
|
||||
# a-b-c => a/b/c
|
||||
package_folder_path = PACKAGE_NAME.replace('-', '/')
|
||||
# a-b-c => a.b.c
|
||||
namespace_name = PACKAGE_NAME.replace('-', '.')
|
||||
|
||||
# azure v0.x is not compatible with this package
|
||||
# azure v0.x used to have a __version__ attribute (newer versions don't)
|
||||
try:
|
||||
import azure
|
||||
try:
|
||||
ver = azure.__version__
|
||||
raise Exception(
|
||||
'This package is incompatible with azure=={}. '.format(ver) +
|
||||
'Uninstall it with "pip uninstall azure".'
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Version extraction inspired from 'requests'
|
||||
with open(os.path.join(package_folder_path, 'version.py')
|
||||
if os.path.exists(os.path.join(package_folder_path, 'version.py'))
|
||||
else os.path.join(package_folder_path, '_version.py'), 'r') as fd:
|
||||
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
|
||||
fd.read(), re.MULTILINE).group(1)
|
||||
|
||||
if not version:
|
||||
raise RuntimeError('Cannot find version information')
|
||||
|
||||
with open('README.md', encoding='utf-8') as f:
|
||||
readme = f.read()
|
||||
with open('CHANGELOG.md', encoding='utf-8') as f:
|
||||
changelog = f.read()
|
||||
|
||||
setup(
|
||||
name=PACKAGE_NAME,
|
||||
version=version,
|
||||
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
|
||||
long_description=readme + '\n\n' + changelog,
|
||||
long_description_content_type='text/markdown',
|
||||
license='MIT License',
|
||||
author='Microsoft Corporation',
|
||||
author_email='azpysdkhelp@microsoft.com',
|
||||
url='https://github.com/Azure/azure-sdk-for-python',
|
||||
classifiers=[
|
||||
'{{classifier}}',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 2',
|
||||
'Programming Language :: Python :: 2.7',
|
||||
'Programming Language :: Python :: 3',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
'Programming Language :: Python :: 3.6',
|
||||
'Programming Language :: Python :: 3.7',
|
||||
'Programming Language :: Python :: 3.8',
|
||||
'License :: OSI Approved :: MIT License',
|
||||
],
|
||||
zip_safe=False,
|
||||
packages=find_packages(exclude=[
|
||||
'tests',
|
||||
# Exclude packages that will be covered by PEP420 or nspkg
|
||||
{%- for nspkg_name in nspkg_names %}
|
||||
'{{ nspkg_name }}',
|
||||
{%- endfor %}
|
||||
]),
|
||||
install_requires=[
|
||||
'msrest>=0.5.0',
|
||||
{%- if need_msrestazure %}
|
||||
'msrestazure>=0.4.32,<2.0.0',
|
||||
{%- endif %}
|
||||
'azure-common~=1.1',
|
||||
{%- if need_azurecore %}
|
||||
'azure-mgmt-core>=1.0.0,<2.0.0',
|
||||
{%- endif %}
|
||||
],
|
||||
extras_require={
|
||||
":python_version<'3.0'": ['{{package_nspkg}}'],
|
||||
}
|
||||
)
|
|
@ -0,0 +1,100 @@
|
|||
import argparse
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import tempfile
|
||||
|
||||
from azure_devtools.ci_tools.git_tools import (
|
||||
do_commit,
|
||||
)
|
||||
from azure_devtools.ci_tools.github_tools import (
|
||||
manage_git_folder,
|
||||
configure_user
|
||||
)
|
||||
|
||||
from git import Repo
|
||||
from github import Github
|
||||
|
||||
from . import build_packaging_by_package_name
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_SDK_FOLDER_RE = re.compile(r"^(sdk/[\w-]+)/(azure[\w-]+)/", re.ASCII)
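# Hypothetical example (illustrative path only): "sdk/storage/azure-mgmt-storage/setup.py"
# matches as base folder "sdk/storage" and package "azure-mgmt-storage".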
|
||||
|
||||
|
||||
def update_pr(gh_token, repo_id, pr_number):
|
||||
from github import Github
|
||||
con = Github(gh_token)
|
||||
repo = con.get_repo(repo_id)
|
||||
sdk_pr = repo.get_pull(pr_number)
|
||||
files = [one_file.filename for one_file in sdk_pr.get_files() if one_file.status not in ['removed']]
|
||||
# "get_files" of Github only download the first 300 files. Might not be enough.
|
||||
package_names = {('.', f.split('/')[0]) for f in files if f.startswith("azure")}
|
||||
# Handle the SDK folder as well
|
||||
matches = {_SDK_FOLDER_RE.search(f) for f in files}
|
||||
package_names.update({match.groups() for match in matches if match is not None})
|
||||
|
||||
# Get PR branch to push
|
||||
head_repo = sdk_pr.head.repo.full_name
|
||||
head_branch = sdk_pr.head.ref
|
||||
branched_index = "{}@{}".format(head_repo, head_branch)
|
||||
_LOGGER.info("Checkout %s", branched_index)
|
||||
|
||||
with tempfile.TemporaryDirectory() as temp_dir, \
|
||||
manage_git_folder(gh_token, Path(temp_dir) / Path("sdk"), branched_index) as sdk_repo_root:
|
||||
|
||||
sdk_repo = Repo(str(sdk_repo_root))
|
||||
configure_user(gh_token, sdk_repo)
|
||||
|
||||
for base_folder, package_name in package_names:
|
||||
if package_name.endswith("nspkg"):
|
||||
_LOGGER.info("Skip nspkg packages for update PR")
|
||||
continue
|
||||
|
||||
# Rebuild packaging
|
||||
_LOGGER.info("Try update package %s from folder %s", package_name, base_folder)
|
||||
build_packaging_by_package_name(package_name, sdk_repo_root / Path(base_folder), build_conf=True)
|
||||
# Commit that
|
||||
do_commit(
|
||||
sdk_repo,
|
||||
"Packaging update of {}".format(package_name),
|
||||
head_branch,
|
||||
None # Unused
|
||||
)
|
||||
# Push all commits at once
|
||||
sdk_repo.git.push('origin', head_branch, set_upstream=True)
|
||||
|
||||
def update_pr_main():
|
||||
"""Main method"""
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Build package.',
|
||||
formatter_class=argparse.RawTextHelpFormatter)
|
||||
parser.add_argument('--pr-number', '-p',
|
||||
dest='pr_number', type=int, required=True,
|
||||
help='PR number')
|
||||
parser.add_argument('--repo', '-r',
|
||||
dest='repo_id', default="Azure/azure-sdk-for-python",
|
||||
help='Repo id. [default: %(default)s]')
|
||||
parser.add_argument("-v", "--verbose",
|
||||
dest="verbose", action="store_true",
|
||||
help="Verbosity in INFO mode")
|
||||
parser.add_argument("--debug",
|
||||
dest="debug", action="store_true",
|
||||
help="Verbosity in DEBUG mode")
|
||||
|
||||
args = parser.parse_args()
|
||||
main_logger = logging.getLogger()
|
||||
if args.verbose or args.debug:
|
||||
logging.basicConfig()
|
||||
main_logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
|
||||
|
||||
update_pr(
|
||||
os.environ.get("GH_TOKEN", None),
|
||||
args.repo_id,
|
||||
int(args.pr_number),
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
|
||||
update_pr_main()
|
|
@ -0,0 +1,46 @@
|
|||
from contextlib import contextmanager
|
||||
import tempfile
|
||||
import subprocess
|
||||
import venv
|
||||
|
||||
class ExtendedEnvBuilder(venv.EnvBuilder):
|
||||
"""An extended env builder which saves the context, to have access
|
||||
easily to bin path and such.
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self.context = None
|
||||
super(ExtendedEnvBuilder, self).__init__(*args, **kwargs)
|
||||
|
||||
def ensure_directories(self, env_dir):
|
||||
self.context = super(ExtendedEnvBuilder, self).ensure_directories(env_dir)
|
||||
return self.context
|
||||
|
||||
|
||||
def create(env_dir, system_site_packages=False, clear=False,
|
||||
symlinks=False, with_pip=False, prompt=None):
|
||||
"""Create a virtual environment in a directory."""
|
||||
builder = ExtendedEnvBuilder(system_site_packages=system_site_packages,
|
||||
clear=clear, symlinks=symlinks, with_pip=with_pip,
|
||||
prompt=prompt)
|
||||
builder.create(env_dir)
|
||||
return builder.context
|
||||
|
||||
@contextmanager
|
||||
def create_venv_with_package(packages):
|
||||
"""Create a venv with these packages in a temp dir and yielf the env.
|
||||
|
||||
packages should be an iterable of pip version instructions (e.g. package~=1.2.3)
|
||||
"""
|
||||
with tempfile.TemporaryDirectory() as tempdir:
|
||||
myenv = create(tempdir, with_pip=True)
|
||||
pip_call = [
|
||||
myenv.env_exe,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
]
|
||||
subprocess.check_call(pip_call + ['-U', 'pip'])
|
||||
if packages:
|
||||
subprocess.check_call(pip_call + packages)
|
||||
yield myenv
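# Hypothetical usage (illustrative package and check only):
# with create_venv_with_package(["packaging~=21.0"]) as venv:
#     subprocess.check_call([venv.env_exe, "-c", "import packaging"])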
|
|
@ -0,0 +1 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # type: ignore
|
|
@ -0,0 +1,58 @@
|
|||
from typing import Optional
|
||||
|
||||
from packaging.version import parse as Version, InvalidVersion
|
||||
|
||||
import requests
|
||||
|
||||
def get_pypi_xmlrpc_client():
|
||||
"""This is actually deprecated client.
|
||||
"""
|
||||
import xmlrpc.client
|
||||
return xmlrpc.client.ServerProxy("https://pypi.python.org/pypi", use_datetime=True)
|
||||
|
||||
class PyPIClient:
|
||||
def __init__(self, host="https://pypi.org"):
|
||||
self._host = host
|
||||
self._session = requests.Session()
|
||||
|
||||
def project(self, package_name):
|
||||
response = self._session.get(
|
||||
"{host}/pypi/{project_name}/json".format(
|
||||
host=self._host,
|
||||
project_name=package_name
|
||||
)
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
def project_release(self, package_name, version):
|
||||
response = self._session.get(
|
||||
"{host}/pypi/{project_name}/{version}/json".format(
|
||||
host=self._host,
|
||||
project_name=package_name,
|
||||
version=version
|
||||
)
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
def get_ordered_versions(self, package_name):
|
||||
project = self.project(package_name)
|
||||
versions = [
|
||||
Version(package_version)
|
||||
for package_version
|
||||
in project["releases"].keys()
|
||||
]
|
||||
versions.sort()
|
||||
return versions
|
||||
|
||||
def get_relevant_versions(self, package_name):
|
||||
"""Return a tuple: (latest release, latest stable)
|
||||
If they are different, it means the latest release is not a stable one.
|
||||
"""
|
||||
versions = self.get_ordered_versions(package_name)
|
||||
stable_releases = [version for version in versions if not version.is_prerelease]
|
||||
return (
|
||||
versions[-1],
|
||||
stable_releases[-1]
|
||||
)
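# Hypothetical example (illustrative versions only): if PyPI lists 1.0.0, 1.1.0 and 2.0.0b1 for a package,
# get_relevant_versions returns (2.0.0b1, 1.1.0): the latest overall and the latest stable.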
|
|
@ -0,0 +1,2 @@
|
|||
[packaging]
|
||||
auto_update = false
|
|
@ -0,0 +1,45 @@
|
|||
import os
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
# This is a "fake" package, meaning it's not supposed to be released but used
|
||||
# locally with "pip install -e"
|
||||
|
||||
DEPENDENCIES = [
|
||||
# Packaging
|
||||
'packaging',
|
||||
'wheel',
|
||||
'Jinja2',
|
||||
'pytoml',
|
||||
'json-delta>=2.0',
|
||||
# Tests
|
||||
'pytest-cov',
|
||||
'pytest>=3.5.1',
|
||||
# 'azure-devtools>=0.4.1' overridden by packaging needs
|
||||
'readme_renderer',
|
||||
# 'azure-storage-file<2.0',
|
||||
'azure-storage-common<1.4.1',
|
||||
'pyopenssl',
|
||||
'azure-mgmt-resource',
|
||||
'azure-mgmt-storage',
|
||||
'azure-mgmt-keyvault'
|
||||
]
|
||||
|
||||
setup(
|
||||
name = "azure-sdk-tools",
|
||||
version = "0.0.0",
|
||||
author='Microsoft Corporation',
|
||||
author_email='azpysdkhelp@microsoft.com',
|
||||
url='https://github.com/Azure/azure-sdk-for-python',
|
||||
packages=find_packages(),
|
||||
long_description="Specific tools for Azure SDK for Python testing",
|
||||
install_requires=DEPENDENCIES,
|
||||
entry_points = {
|
||||
'console_scripts': [
|
||||
'generate_package=packaging_tools.generate_package:generate_main',
|
||||
'generate_sdk=packaging_tools.generate_sdk:generate_main',
|
||||
],
|
||||
},
|
||||
extras_require={
|
||||
":python_version>='3.5'": ['pytest-asyncio>=0.9.0']
|
||||
}
|
||||
)
|
|
@ -0,0 +1,29 @@
|
|||
{
|
||||
"$schema": "https://openapistorageprod.blob.core.windows.net/sdkautomation/prod/schemas/swagger_to_sdk_config.schema.json",
|
||||
"meta": {
|
||||
"after_scripts_in_repo_with_service": ["bash ./scripts/trenton_run.sh"],
|
||||
"autorest_options_for_otherSDK": {
|
||||
"use": "@microsoft.azure/autorest.go@2.1.134",
|
||||
"go": "",
|
||||
"sdkrel:go-sdk-folder": "./vendor",
|
||||
"use-onever": "",
|
||||
"preview-chk": "",
|
||||
"version": "V2"
|
||||
},
|
||||
"autorest_options": {
|
||||
"trenton": "",
|
||||
"use": "https://trenton.blob.core.windows.net/trenton/autorest-trenton-0.2.2.tgz",
|
||||
"sdkrel:output-folder": "./azurerm/internal/services",
|
||||
"clear-output-folder": "true"
|
||||
},
|
||||
"advanced_options": {
|
||||
"create_sdk_pull_requests": true,
|
||||
"sdk_generation_pull_request_base": "integration_branch"
|
||||
},
|
||||
"repotag": "azure-sdk-for-trenton",
|
||||
"envs": {
|
||||
"sdkrel:GOPATH": "../../../.."
|
||||
},
|
||||
"version": "0.1.0"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,168 @@
|
|||
{
|
||||
"swagger": "2.0",
|
||||
"info": {
|
||||
"version": "2020-01-01",
|
||||
"title": "TestServiceClient",
|
||||
"description": "Test Service Client for SDK Automation integration test."
|
||||
},
|
||||
"host": "management.azure.com",
|
||||
"schemes": [
|
||||
"https"
|
||||
],
|
||||
"produces": [
|
||||
"application/json"
|
||||
],
|
||||
"consumes": [
|
||||
"application/json"
|
||||
],
|
||||
"security": [
|
||||
{
|
||||
"azure_auth": [
|
||||
"user_impersonation"
|
||||
]
|
||||
}
|
||||
],
|
||||
"securityDefinitions": {
|
||||
"azure_auth": {
|
||||
"type": "oauth2",
|
||||
"authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize",
|
||||
"flow": "implicit",
|
||||
"description": "Azure Active Directory OAuth2 Flow",
|
||||
"scopes": {
|
||||
"user_impersonation": "impersonate your user account"
|
||||
}
|
||||
}
|
||||
},
|
||||
"paths": {
|
||||
"/providers/Microsoft.TestService/test": {
|
||||
"get": {
|
||||
"operationId": "Test_Get",
|
||||
"description": "Get test.",
|
||||
"x-ms-examples": {
|
||||
"MsiOperationsList": {
|
||||
"$ref": "./examples/TestGet.json"
|
||||
}
|
||||
},
|
||||
"parameters": [
|
||||
{
|
||||
"$ref": "#/parameters/ApiVersionParameter"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "The operation was successful.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/TestGetResult"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"description": "Error response describing why the operation failed.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/CloudError"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"put": {
|
||||
"operationId": "Test_Put",
|
||||
"description": "Put test.",
|
||||
"x-ms-examples": {
|
||||
"MsiOperationsList": {
|
||||
"$ref": "./examples/TestPut.json"
|
||||
}
|
||||
},
|
||||
"parameters": [
|
||||
{
|
||||
"$ref": "#/parameters/ApiVersionParameter"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "The operation was successful.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/TestPutResult"
|
||||
}
|
||||
},
|
||||
"default": {
|
||||
"description": "Error response describing why the operation failed.",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/CloudError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"definitions": {
|
||||
"CloudError": {
|
||||
"type": "object",
|
||||
"x-ms-external": true,
|
||||
"properties": {
|
||||
"error": {
|
||||
"description": "A list of additional details about the error.",
|
||||
"$ref": "#/definitions/CloudErrorBody"
|
||||
}
|
||||
},
|
||||
"description": "An error response from the ManagedServiceIdentity service."
|
||||
},
|
||||
"CloudErrorBody": {
|
||||
"type": "object",
|
||||
"x-ms-external": true,
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "string",
|
||||
"description": "An identifier for the error."
|
||||
},
|
||||
"message": {
|
||||
"type": "string",
|
||||
"description": "A message describing the error, intended to be suitable for display in a user interface."
|
||||
},
|
||||
"target": {
|
||||
"type": "string",
|
||||
"description": "The target of the particular error. For example, the name of the property in error."
|
||||
},
|
||||
"details": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/CloudErrorBody"
|
||||
},
|
||||
"description": "A list of additional details about the error."
|
||||
}
|
||||
},
|
||||
"description": "An error response from the ManagedServiceIdentity service."
|
||||
},
|
||||
"TestGetResult": {
|
||||
"type": "object",
|
||||
"title": "Test Get.",
|
||||
"description": "Mocked result.",
|
||||
"properties": {
|
||||
"value": {
|
||||
"type": "string",
|
||||
"title": "Test result.",
|
||||
"description": "Test result."
|
||||
}
|
||||
}
|
||||
},
|
||||
"TestPutResult": {
|
||||
"type": "object",
|
||||
"title": "Test Put.",
|
||||
"description": "Mocked result.",
|
||||
"properties": {
|
||||
"value": {
|
||||
"type": "string",
|
||||
"title": "Test result.",
|
||||
"description": "Test result."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"parameters": {
|
||||
"ApiVersionParameter": {
|
||||
"name": "api-version",
|
||||
"in": "query",
|
||||
"description": "Version of API to invoke.",
|
||||
"required": true,
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
{
|
||||
"parameters": {
|
||||
"api-version": "2020-01-01"
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"value": "Test"
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,10 @@
{
  "parameters": {
    "api-version": "2020-01-01"
  },
  "responses": {
    "200": {
      "value": "Test"
    }
  }
}
@@ -0,0 +1,19 @@
## AzureResourceSchema

These settings apply only when `--azureresourceschema` is specified on the command line.

### AzureResourceSchema multi-api

``` yaml $(azureresourceschema) && $(multiapi)
batch:
  - tag: package-2020-01
```

### Tag: package-2020-01 and azureresourceschema

These settings apply only when `--tag=package-2020-01 --azureresourceschema` is specified on the command line.
Please also specify `--azureresourceschema-folder=<path to the root directory of your azure-resource-manager-schemas clone>`.

``` yaml $(tag) == 'package-2020-01' && $(azureresourceschema)
output-folder: $(azureresourceschema-folder)/schemas
```
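As a rough illustration (not part of the checked-in fixture): if the schemas clone sat at a hypothetical `/work/azure-resource-manager-schemas`, running with `--tag=package-2020-01 --azureresourceschema --azureresourceschema-folder=/work/azure-resource-manager-schemas` would resolve the conditional block above to:

``` yaml
# Hypothetical resolved settings; the folder path is an assumption for illustration only.
output-folder: /work/azure-resource-manager-schemas/schemas
```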
@@ -0,0 +1,75 @@
# Test Service

``` yaml
openapi-type: arm
tag: package-2020-01
license-header: MICROSOFT_MIT_NO_VERSION
```

``` yaml $(tag) == 'package-2020-01'
input-file:
  - Microsoft.TestService/stable/2020-01-01/TestService.json
```

``` yaml $(multiapi) && !$(track2)
batch:
  - tag: package-2020-01
```

``` yaml $(swagger-to-sdk)
swagger-to-sdk:
  - repo: azure-sdk-for-go
  - repo: azure-sdk-for-js
  - repo: azure-sdk-for-java
  - repo: azure-sdk-for-python
  - repo: azure-sdk-for-net
  - repo: azure-sdk-for-trenton
  - repo: azure-cli-extensions
  - repo: azure-sdk-for-python-track2
  - repo: azure-resource-manager-schemas
```

``` yaml $(go)
go:
  license-header: MICROSOFT_APACHE_NO_VERSION
  namespace: testservice
  clear-output-folder: true
```

``` yaml $(tag) == 'package-2020-01' && $(go)
output-folder: $(go-sdk-folder)/services/$(namespace)/mgmt/2020-01-01/$(namespace)
```

``` yaml $(typescript)
typescript:
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  package-name: "@azure/test-service"
  output-folder: "$(typescript-sdks-folder)/sdk/testservice/arm-testservice"
  clear-output-folder: true
  generate-metadata: true
```

``` yaml $(cli)
cli:
  cli-name: TestService
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  namespace: azure.mgmt.TestService
  package-name: azure-mgmt-TestService
  clear-output-folder: false
```

``` yaml $(trenton)
trenton:
  cli_name: TestService
  azure_arm: true
  license_header: MICROSOFT_MIT_NO_VERSION
  payload_flattening_threshold: 2
  namespace: azure.mgmt.TestService
  package_name: azure-mgmt-TestService
  clear_output_folder: false
```
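For orientation (illustrative only, not part of the file): invoking AutoRest on this readme with `--go --tag=package-2020-01` activates the matching conditional blocks, which combine roughly into the following effective settings, assuming `go-sdk-folder` points at a hypothetical clone in `/work/azure-sdk-for-go`:

``` yaml
# Hypothetical effective configuration for --go --tag=package-2020-01
input-file:
  - Microsoft.TestService/stable/2020-01-01/TestService.json
go:
  license-header: MICROSOFT_APACHE_NO_VERSION
  namespace: testservice
  clear-output-folder: true
output-folder: /work/azure-sdk-for-go/services/testservice/mgmt/2020-01-01/testservice
```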
@@ -0,0 +1,38 @@
# Python

```yaml !$(track2)
python:
  payload-flattening-threshold: 2
  namespace: azure.mgmt.testservice
  package-name: azure-mgmt-testservice
  package-version: 1.0.0
  clear-output-folder: true
  no-namespace-folders: true
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/
```

```yaml $(track2)
payload-flattening-threshold: 2
namespace: azure.mgmt.testservice
package-name: azure-mgmt-testservice
package-version: 1.0.0
clear-output-folder: true
no-namespace-folders: true
```

```yaml $(multiapi) && $(track2)
batch:
  - tag: package-2020-01
  - multiapiscript: true
```

```yaml $(multiapiscript)
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/
clear-output-folder: false
perform-load: false
```

``` yaml $(tag) == 'package-2020-01' && $(track2)
namespace: azure.mgmt.testservice.v2020_01
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_01
```
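Reading the blocks above: with `--multiapi --track2`, AutoRest runs once per batch entry, and the final `multiapiscript` pass only assembles the multi-API package (`perform-load: false`) in the base folder. Illustratively, the per-tag pass would resolve to something like the following, assuming `python-sdks-folder` is a hypothetical `/work/python-sdks`:

```yaml
# Hypothetical resolved settings for the batch pass with tag package-2020-01
namespace: azure.mgmt.testservice.v2020_01
output-folder: /work/python-sdks/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_01
```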
@@ -0,0 +1,132 @@
{
  "swagger": "2.0",
  "info": {
    "version": "2020-02-01",
    "title": "TestServiceClient",
    "description": "Test Service Client for SDK Automation integration test."
  },
  "host": "management.azure.com",
  "schemes": [
    "https"
  ],
  "produces": [
    "application/json"
  ],
  "consumes": [
    "application/json"
  ],
  "security": [
    {
      "azure_auth": [
        "user_impersonation"
      ]
    }
  ],
  "securityDefinitions": {
    "azure_auth": {
      "type": "oauth2",
      "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize",
      "flow": "implicit",
      "description": "Azure Active Directory OAuth2 Flow",
      "scopes": {
        "user_impersonation": "impersonate your user account"
      }
    }
  },
  "paths": {
    "/providers/Microsoft.TestService/test": {
      "get": {
        "operationId": "Test_Get",
        "description": "Get test.",
        "x-ms-examples": {
          "MsiOperationsList": {
            "$ref": "./examples/TestGet.json"
          }
        },
        "parameters": [
          {
            "$ref": "#/parameters/ApiVersionParameter"
          },
          {
            "name": "test-param",
            "in": "query",
            "description": "New param to test in 2020-02-01",
            "required": true,
            "type": "string"
          }
        ],
        "responses": {
          "200": {
            "description": "The operation was successful.",
            "schema": {
              "$ref": "#/definitions/TestGetResult"
            }
          },
          "default": {
            "description": "Error response describing why the operation failed.",
            "schema": {
              "$ref": "#/definitions/CloudError"
            }
          }
        }
      }
    }
  },
  "definitions": {
    "CloudError": {
      "x-ms-external": true,
      "properties": {
        "error": {
          "description": "A list of additional details about the error.",
          "$ref": "#/definitions/CloudErrorBody"
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "CloudErrorBody": {
      "x-ms-external": true,
      "properties": {
        "code": {
          "type": "string",
          "description": "An identifier for the error."
        },
        "message": {
          "type": "string",
          "description": "A message describing the error, intended to be suitable for display in a user interface."
        },
        "target": {
          "type": "string",
          "description": "The target of the particular error. For example, the name of the property in error."
        },
        "details": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/CloudErrorBody"
          },
          "description": "A list of additional details about the error."
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "TestGetResult": {
      "title": "Test Get.",
      "description": "Mocked result.",
      "properties": {
        "value": {
          "type": "string",
          "title": "Test result.",
          "description": "Test result."
        }
      }
    }
  },
  "parameters": {
    "ApiVersionParameter": {
      "name": "api-version",
      "in": "query",
      "description": "Version of API to invoke.",
      "required": true,
      "type": "string"
    }
  }
}
@@ -0,0 +1,10 @@
{
  "parameters": {
    "api-version": "2020-01-01"
  },
  "responses": {
    "200": {
      "value": "Test"
    }
  }
}
@@ -0,0 +1 @@
{}
@@ -0,0 +1,82 @@
# Test Service

``` yaml
openapi-type: arm
tag: package-2020-02
```

``` yaml $(tag) == 'package-2020-01'
input-file:
  - Microsoft.TestService/stable/2020-01-01/TestService.json
```

``` yaml $(tag) == 'package-2020-02'
input-file:
  - Microsoft.TestService/stable/2020-02-01/TestService.json
```

``` yaml $(multiapi) && !$(track2)
batch:
  - tag: package-2020-01
  - tag: package-2020-02
```

``` yaml $(swagger-to-sdk)
swagger-to-sdk:
  - repo: azure-sdk-for-go
  - repo: azure-sdk-for-js
  - repo: azure-sdk-for-java
  - repo: azure-sdk-for-python
  - repo: azure-sdk-for-net
  - repo: azure-sdk-for-trenton
  - repo: azure-cli-extensions
  - repo: azure-sdk-for-python-track2
```

``` yaml $(go)
go:
  license-header: MICROSOFT_APACHE_NO_VERSION
  namespace: testservice
  clear-output-folder: true
```

``` yaml $(tag) == 'package-2020-01' && $(go)
output-folder: $(go-sdk-folder)/services/$(namespace)/mgmt/2020-01-01/$(namespace)
```

``` yaml $(tag) == 'package-2020-02' && $(go)
output-folder: $(go-sdk-folder)/services/$(namespace)/mgmt/2020-02-01/$(namespace)
```

``` yaml $(typescript)
typescript:
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  package-name: "@azure/test-service"
  output-folder: "$(typescript-sdks-folder)/sdk/testservice/arm-testservice"
  clear-output-folder: true
  generate-metadata: true
```

``` yaml $(cli)
cli:
  cli-name: TestService
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  namespace: azure.mgmt.TestService
  package-name: azure-mgmt-TestService
  clear-output-folder: false
```

``` yaml $(trenton)
trenton:
  cli_name: TestService
  azure_arm: true
  license_header: MICROSOFT_MIT_NO_VERSION
  payload_flattening_threshold: 2
  namespace: azure.mgmt.TestService
  package_name: azure-mgmt-TestService
  clear_output_folder: false
```
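Compared with the earlier readme, this one adds a `package-2020-02` tag, so a multi-API run (without track2) generates once per tag. Illustratively, with a hypothetical clone at `/work/azure-sdk-for-go`, the two Go batch passes above would land in:

``` yaml
# Hypothetical output folders for the two batch passes; the clone path is an assumption
- tag: package-2020-01   # /work/azure-sdk-for-go/services/testservice/mgmt/2020-01-01/testservice
- tag: package-2020-02   # /work/azure-sdk-for-go/services/testservice/mgmt/2020-02-01/testservice
```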
@@ -0,0 +1,51 @@
# Python

```yaml !$(track2)
python:
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice
  payload-flattening-threshold: 2
  namespace: azure.mgmt.testservice
  package-name: azure-mgmt-testservice
  package-version: 1.0.1
  clear-output-folder: true
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/
```

```yaml $(track2)
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice
payload-flattening-threshold: 2
namespace: azure.mgmt.testservice
package-name: azure-mgmt-testservice
package-version: 1.0.1
clear-output-folder: true
no-namespace-folders: true
```

```yaml $(multiapi) && $(track2)
batch:
  - tag: package-2020-01
  - tag: package-2020-02
  - multiapiscript: true
```

```yaml $(multiapiscript)
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/
clear-output-folder: false
perform-load: false
```

``` yaml $(tag) == 'package-2020-01'
namespace: azure.mgmt.testservice.v2020_01
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_01
python:
  namespace: azure.mgmt.testservice.v2020_01
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_01
```

``` yaml $(tag) == 'package-2020-02'
namespace: azure.mgmt.testservice.v2020_02
output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_02
python:
  namespace: azure.mgmt.testservice.v2020_02
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_02
```
@@ -0,0 +1,3 @@
# No update on Spec

Only an empty markdown file is updated.
@@ -0,0 +1,125 @@
{
  "swagger": "2.0",
  "info": {
    "version": "2020-01-01",
    "title": "AnotherServiceClient",
    "description": "Another Test Service Client for SDK Automation integration test."
  },
  "host": "management.azure.com",
  "schemes": [
    "https"
  ],
  "produces": [
    "application/json"
  ],
  "consumes": [
    "application/json"
  ],
  "security": [
    {
      "azure_auth": [
        "user_impersonation"
      ]
    }
  ],
  "securityDefinitions": {
    "azure_auth": {
      "type": "oauth2",
      "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize",
      "flow": "implicit",
      "description": "Azure Active Directory OAuth2 Flow",
      "scopes": {
        "user_impersonation": "impersonate your user account"
      }
    }
  },
  "paths": {
    "/providers/Microsoft.TestService/test": {
      "get": {
        "operationId": "Test_Get",
        "description": "Get test.",
        "x-ms-examples": {
          "MsiOperationsList": {
            "$ref": "./examples/TestGet.json"
          }
        },
        "parameters": [
          {
            "$ref": "#/parameters/ApiVersionParameter"
          }
        ],
        "responses": {
          "200": {
            "description": "The operation was successful.",
            "schema": {
              "$ref": "#/definitions/TestGetResult"
            }
          },
          "default": {
            "description": "Error response describing why the operation failed.",
            "schema": {
              "$ref": "#/definitions/CloudError"
            }
          }
        }
      }
    }
  },
  "definitions": {
    "CloudError": {
      "x-ms-external": true,
      "properties": {
        "error": {
          "description": "A list of additional details about the error.",
          "$ref": "#/definitions/CloudErrorBody"
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "CloudErrorBody": {
      "x-ms-external": true,
      "properties": {
        "code": {
          "type": "string",
          "description": "An identifier for the error."
        },
        "message": {
          "type": "string",
          "description": "A message describing the error, intended to be suitable for display in a user interface."
        },
        "target": {
          "type": "string",
          "description": "The target of the particular error. For example, the name of the property in error."
        },
        "details": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/CloudErrorBody"
          },
          "description": "A list of additional details about the error."
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "TestGetResult": {
      "title": "Test Get.",
      "description": "Mocked result.",
      "properties": {
        "value": {
          "type": "string",
          "title": "Test result.",
          "description": "Test result."
        }
      }
    }
  },
  "parameters": {
    "ApiVersionParameter": {
      "name": "api-version",
      "in": "query",
      "description": "Version of API to invoke.",
      "required": true,
      "type": "string"
    }
  }
}
@@ -0,0 +1,10 @@
{
  "parameters": {
    "api-version": "2020-01-01"
  },
  "responses": {
    "200": {
      "value": "Test"
    }
  }
}
@@ -0,0 +1,73 @@
# Test Service

``` yaml
openapi-type: arm
tag: package-2020-01
```

``` yaml $(tag) == 'package-2020-01'
input-file:
  - Microsoft.AnotherService/stable/2020-01-01/AnotherService.json
```

``` yaml $(multiapi)
batch:
  - tag: package-2020-01
```

``` yaml $(swagger-to-sdk)
swagger-to-sdk:
  - repo: azure-sdk-for-go
  - repo: azure-sdk-for-js
  - repo: azure-sdk-for-java
  - repo: azure-sdk-for-python
  - repo: azure-sdk-for-net
  - repo: azure-sdk-for-trenton
  - repo: azure-cli-extensions
```

``` yaml $(go)
go:
  license-header: MICROSOFT_APACHE_NO_VERSION
  namespace: anotherservice
  clear-output-folder: true
```

``` yaml $(tag) == 'package-2020-01' && $(go)
output-folder: $(go-sdk-folder)/services/$(namespace)/mgmt/2020-01-01/$(namespace)
```

``` yaml $(typescript)
typescript:
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  package-name: "@azure/another-service"
  output-folder: "$(typescript-sdks-folder)/sdk/anotherservice/arm-anotherservice"
  clear-output-folder: true
  generate-metadata: true
```

``` yaml $(python)
python:
  basic-setup-py: true
  output-folder: $(python-sdks-folder)/anotherservice/azure-mgmt-anotherservice
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  namespace: azure.mgmt.anotherservice
  package-name: azure-mgmt-anotherservice
  package-version: 1.0.0
  clear-output-folder: true
```

```yaml $(python) && $(multiapi)
batch:
  - tag: package-2020-01
```

``` yaml $(tag) == 'package-2020-01' && $(python)
python:
  namespace: azure.mgmt.anotherservice.v2020_01
  output-folder: $(python-sdks-folder)/anotherservice/azure-mgmt-anotherservice/azure/mgmt/anotherservice/v2020_01
```
@@ -0,0 +1,125 @@
{
  "swagger": "2.0",
  "info": {
    "version": "2020-01-01",
    "title": "TestServiceClient",
    "description": "Test Service Client for SDK Automation integration test."
  },
  "host": "management.azure.com",
  "schemes": [
    "https"
  ],
  "produces": [
    "application/json"
  ],
  "consumes": [
    "application/json"
  ],
  "security": [
    {
      "azure_auth": [
        "user_impersonation"
      ]
    }
  ],
  "securityDefinitions": {
    "azure_auth": {
      "type": "oauth2",
      "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize",
      "flow": "implicit",
      "description": "Azure Active Directory OAuth2 Flow",
      "scopes": {
        "user_impersonation": "impersonate your user account"
      }
    }
  },
  "paths": {
    "/providers/Microsoft.TestService/test": {
      "get": {
        "operationId": "Test_Get",
        "description": "Get test.",
        "x-ms-examples": {
          "MsiOperationsList": {
            "$ref": "./examples/TestGet.json"
          }
        },
        "parameters": [
          {
            "$ref": "#/parameters/ApiVersionParameter"
          }
        ],
        "responses": {
          "200": {
            "description": "The operation was successful.",
            "schema": {
              "$ref": "#/definitions/TestGetResult"
            }
          },
          "default": {
            "description": "Error response describing why the operation failed.",
            "schema": {
              "$ref": "#/definitions/CloudError"
            }
          }
        }
      }
    }
  },
  "definitions": {
    "CloudError": {
      "x-ms-external": true,
      "properties": {
        "error": {
          "description": "A list of additional details about the error.",
          "$ref": "#/definitions/CloudErrorBody"
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "CloudErrorBody": {
      "x-ms-external": true,
      "properties": {
        "code": {
          "type": "string",
          "description": "An identifier for the error."
        },
        "message": {
          "type": "string",
          "description": "A message describing the error, intended to be suitable for display in a user interface."
        },
        "target": {
          "type": "string",
          "description": "The target of the particular error. For example, the name of the property in error."
        },
        "details": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/CloudErrorBody"
          },
          "description": "A list of additional details about the error."
        }
      },
      "description": "An error response from the ManagedServiceIdentity service."
    },
    "TestGetResult": {
      "title": "Test Get.",
      "description": "Mocked result.",
      "properties": {
        "value": {
          "type": "string",
          "title": "Test result.",
          "description": "Test result."
        }
      }
    }
  },
  "parameters": {
    "ApiVersionParameter": {
      "name": "api-version",
      "in": "query",
      "description": "Version of API to invoke.",
      "required": true,
      "type": "string"
    }
  }
}
@@ -0,0 +1,10 @@
{
  "parameters": {
    "api-version": "2020-01-01"
  },
  "responses": {
    "200": {
      "value": "Test"
    }
  }
}
@@ -0,0 +1,73 @@
# Test Service

``` yaml
openapi-type: arm
tag: package-2020-01
```

``` yaml $(tag) == 'package-2020-01'
input-file:
  - Microsoft.TestService/stable/2020-01-01/TestService.json
```

``` yaml $(multiapi)
batch:
  - tag: package-2020-01
```

``` yaml $(swagger-to-sdk)
swagger-to-sdk:
  - repo: azure-sdk-for-go
  - repo: azure-sdk-for-js
  - repo: azure-sdk-for-java
  - repo: azure-sdk-for-python
  - repo: azure-sdk-for-net
  - repo: azure-sdk-for-trenton
  - repo: azure-cli-extensions
```

``` yaml $(go)
go:
  license-header: MICROSOFT_APACHE_NO_VERSION
  namespace: testservice
  clear-output-folder: true
```

``` yaml $(tag) == 'package-2020-01' && $(go)
output-folder: $(go-sdk-folder)/services/$(namespace)/mgmt/2020-01-01/$(namespace)
```

``` yaml $(typescript)
typescript:
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  package-name: "@azure/test-service"
  output-folder: "$(typescript-sdks-folder)/sdk/testservice/arm-testservice"
  clear-output-folder: true
  generate-metadata: true
```

``` yaml $(python)
python:
  basic-setup-py: true
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice
  azure-arm: true
  license-header: MICROSOFT_MIT_NO_VERSION
  payload-flattening-threshold: 2
  namespace: azure.mgmt.testservice
  package-name: azure-mgmt-testservice
  package-version: 1.0.0
  clear-output-folder: true
```

```yaml $(python) && $(multiapi)
batch:
  - tag: package-2020-01
```

``` yaml $(tag) == 'package-2020-01' && $(python)
python:
  namespace: azure.mgmt.testservice.v2020_01
  output-folder: $(python-sdks-folder)/testservice/azure-mgmt-testservice/azure/mgmt/testservice/v2020_01
```
@@ -0,0 +1,33 @@
{
  "sdkRepositoryMappings": {
    "azure-sdk-for-go": {
      "mainRepository": "azure-sdk-for-go-test"
    },
    "azure-sdk-for-js": {
      "mainRepository": "azure-sdk-for-js-test"
    },
    "azure-sdk-for-python": {
      "mainRepository": "azure-sdk-for-python-test",
      "mainBranch": "release/v3"
    },
    "azure-sdk-for-python-track2": {
      "mainRepository": "azure-sdk-for-python-track2-test",
      "configFilePath": "swagger_to_sdk_custom_config.json"
    },
    "azure-sdk-for-trenton": {
      "mainRepository": "azure-sdk-for-trenton-test"
    },
    "azure-resource-manager-schemas": {
      "mainRepository": "azure-resource-manager-schemas-test"
    }
  },
  "overrides": {
    "azure-rest-api-specs-pr": {
      "sdkRepositoryMappings": {
        "azure-sdk-for-js": {
          "mainRepository": "azure-sdk-for-js-test-pr"
        }
      }
    }
  }
}
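To summarize the mapping above (an illustrative reading, not part of the fixture): each SDK repository name resolves to a test repository, `mainBranch` and `configFilePath` override the default branch and generation config where present, and the `overrides` section swaps in a different target when the spec repository is `azure-rest-api-specs-pr`:

``` yaml
# Illustrative summary of the resolved targets; the key layout here is editorial, not a schema
azure-sdk-for-js:
  default: azure-sdk-for-js-test
  azure-rest-api-specs-pr: azure-sdk-for-js-test-pr   # applied via "overrides"
azure-sdk-for-python:
  default: azure-sdk-for-python-test                  # PRs raised against branch release/v3
azure-sdk-for-python-track2:
  default: azure-sdk-for-python-track2-test           # reads swagger_to_sdk_custom_config.json
```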