sdk generation pipeline docker (#3259)
Parent: c009706c7c
Commit: bfa31f71af

@@ -0,0 +1,4 @@
dist/
tmp
*.js
node_modules

@@ -0,0 +1,68 @@
{
    "env": {
        "browser": true,
        "commonjs": true,
        "es2021": true
    },
    "extends": [
        "google"
    ],
    "parser": "@typescript-eslint/parser",
    "parserOptions": {
        "ecmaVersion": "2019"
    },
    "plugins": [
        "@typescript-eslint",
        "simple-import-sort"
    ],
    "rules": {
        "indent": ["error", 4],
        "max-len": ["error", 160],
        "comma-dangle": ["error", "never"],
        "require-jsdoc": ["error", {
            "require": {
                "FunctionDeclaration": false,
                "MethodDefinition": false,
                "ClassDeclaration": false,
                "ArrowFunctionExpression": false,
                "FunctionExpression": false
            }
        }],
        "object-curly-spacing": ["error", "always"],
        "simple-import-sort/imports": "error",
        "new-cap": "off",
        "valid-jsdoc": "off",
        "@typescript-eslint/naming-convention": [
            "error",
            {
                "selector": ["class", "interface", "enum", "enumMember", "typeParameter", "typeLike", "default"],
                "format": ["PascalCase"],
                "filter": {
                    "regex": "^_$",
                    "match": false
                }
            },
            {
                "selector": ["variable", "parameter", "function", "method", "property", "memberLike"],
                "format": ["camelCase"],
                "filter": {
                    "regex": "^_$",
                    "match": false
                }
            },
            {
                "selector": ["default"],
                "modifiers": ["global", "const"],
                "format": ["UPPER_CASE"],
                "filter": {
                    "regex": "^_$",
                    "match": false
                }
            },
            {
                "selector": ["objectLiteralProperty"],
                "format": null
            }
        ]
    }
}
@@ -0,0 +1,78 @@
FROM mcr.microsoft.com/dotnet/runtime:3.1-focal

RUN apt-get update -y && apt upgrade -y && apt install curl -y
RUN apt install build-essential -y

# install java
ENV JAVA_HOME /usr/java/openjdk-17
ENV PATH $JAVA_HOME/bin:$PATH
ENV LANG en_US.UTF-8
ENV JAVA_VERSION 17.0.1
RUN curl -fL -o openjdk.tgz https://download.java.net/java/GA/jdk17.0.1/2a2082e5a09d4267845be086888add4f/12/GPL/openjdk-17.0.1_linux-x64_bin.tar.gz
RUN mkdir -p "$JAVA_HOME"
RUN tar --extract --file openjdk.tgz --directory "$JAVA_HOME" --strip-components 1 --no-same-owner
RUN rm openjdk.tgz

# install maven
ENV M2_HOME /usr/maven
ENV MAVEN_HOME=/usr/maven
ENV PATH $M2_HOME/bin:$PATH
RUN mkdir -p "$MAVEN_HOME"
RUN curl -fL -o maven.tgz https://dlcdn.apache.org/maven/maven-3/3.8.5/binaries/apache-maven-3.8.5-bin.tar.gz
RUN tar --extract --file maven.tgz --directory "$MAVEN_HOME" --strip-components 1 --no-same-owner
RUN rm maven.tgz

# install node
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash -
RUN apt -y install nodejs

# install python
RUN apt install python3-pip -y && apt install python3-venv -y && pip3 install --upgrade pip

# install powershell
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y wget apt-transport-https software-properties-common
RUN wget -q https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb
RUN dpkg -i packages-microsoft-prod.deb
RUN apt-get update && apt-get install -y powershell
RUN rm packages-microsoft-prod.deb

# install .NET
RUN wget https://dotnet.microsoft.com/download/dotnet/scripts/v1/dotnet-install.sh && chmod 777 ./dotnet-install.sh
RUN bash ./dotnet-install.sh
RUN bash ./dotnet-install.sh -c 3.1
ENV DOTNET_ROOT=/root/.dotnet PATH=$PATH:/root/.dotnet
RUN rm /dotnet-install.sh
RUN apt-get install -y dotnet-sdk-6.0

# install git
RUN add-apt-repository ppa:git-core/ppa -y && apt update && apt upgrade -y && apt install git -y
RUN git config --global credential.helper store && git config --global core.fileMode false

# install dependent packages
RUN pip3 install --upgrade wheel PyYAML requests
RUN npm install -g typescript
RUN npm install -g @microsoft/rush
RUN npm install -g autorest

RUN mkdir "/mock-host" && cd /mock-host && npm install @azure-tools/mock-service-host

# install chrome
RUN curl -LO https://dl.google.com/linux/direct/google-chrome-stable_current_amd64.deb
RUN DEBIAN_FRONTEND=noninteractive apt-get install -y ./google-chrome-stable_current_amd64.deb
RUN rm google-chrome-stable_current_amd64.deb
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true PUPPETEER_EXECUTABLE_PATH=/usr/bin/google-chrome-stable

# install vscode code server and extensions
COPY scripts/install-vscode-server.sh /install-vscode-server.sh
RUN sh /install-vscode-server.sh

# install docker because it's required by the test proxy
RUN curl -fsSL https://test.docker.com -o docker.sh && sh docker.sh

COPY packages/sdk-generation-cli/*.tgz /pack.tgz
RUN npm install -g /pack.tgz

COPY scripts/entrypoint.sh /entrypoint.sh
COPY scripts/rerun-tasks /usr/bin/rerun-tasks
COPY scripts/change-owner.sh /change-owner.sh

ENTRYPOINT ["bash", "/entrypoint.sh"]
@@ -35,24 +35,20 @@ variables:
   - name: NodeVersion
     value: '14.x'
   - name: VAR_ARTIFACT_NAME
-    value: 'drop'
+    value: 'packages'
+  - name: VAR_DOCKER_IMAGE_ARTIFACT_NAME
+    value: 'dockerImages'
   - name: VAR_BUILD_ARTIFACT_STAGING_DIRECTORY
     value: $(Build.ArtifactStagingDirectory)

-pool:
-  name: "azsdk-pool-mms-ubuntu-2004-general"
-  vmImage: "MMSUbuntu20.04"

 stages:
-  - stage: InstallAndBuild
+  - stage: Build
     jobs:
       - job: Build
+        strategy:
+          matrix:
+            linux:
+              imageName: 'ubuntu-latest'
+            mac:
+              imageName: 'macos-latest'
+            windows:
+              imageName: 'windows-latest'
+        pool:
+          vmImage: $(imageName)
         steps:
           - task: NodeTool@0
             inputs:

@@ -69,6 +65,16 @@ stages:
             displayName: 'rush update'
             workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline

+          - script: |
+              rushx lint
+            displayName: 'Lint @azure-tools/sdk-generation-lib'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-lib
+
+          - script: |
+              rushx lint
+            displayName: 'Lint @azure-tools/sdk-generation-cli'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-cli
+
           - script: |
               rush build
             displayName: 'rush build'

@@ -84,26 +90,127 @@ stages:
             displayName: 'Pack @azure-tools/sdk-generation-cli'
             workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-cli

-          - script: 'cp azure-tools-sdk-generation-lib-*.tgz $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)'
-            displayName: 'copy @azure-tools/sdk-generation-lib to staging dir'
+          - script: |
+              mkdir -p $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/packages
+              cp azure-tools-sdk-generation-lib-*.tgz $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/packages/
+            displayName: 'Copy @azure-tools/sdk-generation-lib to staging dir'
             workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-lib
+            condition: contains(variables['imageName'], 'ubuntu')

           - script: 'cp azure-tools-sdk-generation-cli-*.tgz $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)'
             displayName: 'copy @azure-tools/sdk-generation-cli to staging dir'
             workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-cli
+            condition: contains(variables['imageName'], 'ubuntu')

           - task: PublishBuildArtifacts@1
             displayName: 'Publish packages'
             inputs:
-              PathtoPublish: '$(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)'
+              PathtoPublish: '$(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/packages'
               ArtifactName: '$(VAR_ARTIFACT_NAME)'
               publishLocation: 'Container'
+            condition: contains(variables['imageName'], 'ubuntu')

+          - script: 'docker build -t sdkgeneration.azurecr.io/sdk-generation:$(Build.BuildId) .'
+            displayName: 'Build docker image'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - script: |
+              mkdir -p $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/docker-images
+              docker image save sdkgeneration.azurecr.io/sdk-generation:$(Build.BuildId) -o $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/docker-images/image.tar
+            displayName: 'Save docker image'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - task: PublishBuildArtifacts@1
+            displayName: 'Publish docker image'
+            inputs:
+              PathtoPublish: '$(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/docker-images'
+              ArtifactName: '$(VAR_DOCKER_IMAGE_ARTIFACT_NAME)'
+              publishLocation: 'Container'
+
+  - stage: Test
+    dependsOn: Build
+    condition: succeeded()
+    jobs:
+      - job: UnitTestForCli
+        displayName: Unit Test For Cli Package
+        condition: always()
+        steps:
+          - task: NodeTool@0
+            inputs:
+              versionSpec: '$(NodeVersion)'
+            displayName: 'Install Node.js'
+
+          - bash: |
+              npm install -g @microsoft/rush
+              npm install -g typescript@4.6.2
+            displayName: 'Install dependencies'
+
+          - script: |
+              rush update
+            displayName: 'rush update'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - script: |
+              rush build
+            displayName: 'rush build'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - script: |
+              npm i -g autorest
+              rushx test:unit
+            displayName: 'Test @azure-tools/sdk-generation-cli'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-cli
+
+      - job: IntegrationTest
+        condition: always()
+        displayName: Integration Test for
+        strategy:
+          matrix:
+            JS:
+              sdkRepo: 'azure-sdk-for-js'
+            JAVA:
+              sdkRepo: 'azure-sdk-for-java'
+            PYTHON:
+              sdkRepo: 'azure-sdk-for-python'
+            GO:
+              sdkRepo: 'azure-sdk-for-go'
+            Net:
+              sdkRepo: 'azure-sdk-for-net'
+        steps:
+          - task: NodeTool@0
+            inputs:
+              versionSpec: '$(NodeVersion)'
+            displayName: 'Install Node.js'
+
+          - bash: |
+              npm install -g @microsoft/rush
+              npm install -g typescript@4.6.2
+            displayName: 'Install dependencies'
+
+          - script: |
+              rush update
+            displayName: 'rush update'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - script: |
+              rush build
+            displayName: 'rush build'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline
+
+          - task: DownloadBuildArtifacts@0
+            inputs:
+              buildType: 'current'
+              downloadType: 'single'
+              artifactName: '$(VAR_DOCKER_IMAGE_ARTIFACT_NAME)'
+              downloadPath: '$(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)'
+            displayName: 'Download image'
+
+          - bash: |
+              docker load --input $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/$(VAR_DOCKER_IMAGE_ARTIFACT_NAME)/image.tar
+            displayName: 'Docker Load'
+
+          - bash: |
+              set -e
+              rushx test:integration --docker-image="sdkgeneration.azurecr.io/sdk-generation:$(Build.BuildId)" --sdk-repo=$(sdkRepo)
+            displayName: 'Run integration test'
+            workingDirectory: $(System.DefaultWorkingDirectory)/tools/sdk-generation-pipeline/packages/sdk-generation-cli
+
   - ${{if ne(variables['Build.Reason'], 'PullRequest')}}:
     - stage: Release
-      dependsOn: InstallAndBuild
+      dependsOn: Test
       condition: succeeded()
       jobs:
         - job: approve

@@ -136,7 +243,7 @@ stages:
                  echo -e "\e[32m[$(date -u)] LOG: publish the package"
                  echo "//registry.npmjs.org/:_authToken=$(azure-sdk-npm-token)" >> ~/.npmrc
-                 for file in $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/$(VAR_ARTIFACT_NAME)/*.tgz
+                 for file in $(VAR_BUILD_ARTIFACT_STAGING_DIRECTORY)/$(VAR_ARTIFACT_NAME)/$(VAR_ARTIFACT_NAME)/*.tgz
                  do
                    echo -e "\e[32m[$(date -u)] LOG: File: $file"
                    npm publish $file --access public || { echo 'publish $file failed' ; }

The diff of one file is not shown because of its large size.

@@ -0,0 +1,129 @@
We provide a docker image that can be used to generate code and run mock tests. It can be used both for local development and for running in pipelines.

This document only describes how to use the docker image. If you want more information about its design, please go to the [design specs of the docker image](docker-image-design.md).

# Prerequisites
If you are using a Windows machine, we suggest running the commands with WSL docker, because the docker container accesses your local file system frequently and WSL docker is much faster than running docker on Windows directly.

# DOCKER IMAGE COMMANDS

The docker image is used in different scenarios:

1. Run the docker container locally (generate code and do grow up development).
2. Run the docker container in a pipeline.

## RUN DOCKER CONTAINER IN LOCAL

### RUN DOCKER CONTAINER TO GENERATE CODES AND DO GROW UP DEVELOPMENT

Command:

```shell
docker run -it --privileged -v {local_spec_repo_path}:/spec-repo -v {local_work_folder}:/work-dir docker.image:latest --readme={relative_readme} --sdk={sdk_to_generate}
```

Parameter description:

| Parameter | Description | Example |
|---|---|---|
| {local_spec_repo_path} | Required. Points to the swagger folder. | /home/test/azure-rest-api-specs |
| {local_work_folder} | Required. Points to the work folder, which stores all sdk repositories. If an sdk repository is missing from the folder, the docker image will clone it. | /home/test/sdk-repos |
| {relative_readme} | Required. Specifies the readme.md file, which the docker image uses to generate SDKs. It is the relative path from {local_spec_repo_path}. | specification/agrifood/resource-manager/readme.md |
| {sdk_to_generate} | Required. Specifies which sdk languages to generate. Supported values for management sdks: js, java, python, .net, and go. Supported values for dataplane sdks: js, java, python, and .net. To generate multiple packages, separate them with commas. (__Generating multiple packages in one docker container is not recommended: if generating one sdk fails, the container fails and the remaining sdks will not be generated.__) | js,java |

Example Command:
```shell
docker run -it --privileged -v /home/test/azure-rest-api-specs:/spec-repo -v /home/test/work-dir:/work-dir docker.image:latest --readme="specification/agrifood/resource-manager/readme.md" --sdk=js,java
```

After running the command, the docker container generates the SDKs. When the SDKs are generated, the container does not exit, and you can open your browser and go to `http://127.0.0.1:8080/?folder=/work-dir` for further grow up development.
If you want to re-generate code after grow up development or after changing the swagger, run the following command inside the docker container:
```shell
rerun-tasks --readme={relative_readme} --sdk={sdk_to_generate}
```
rerun-tasks is a script that invokes the task engine to re-run the tasks.
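For instance, to re-run the same tasks as the example docker run above:

```shell
rerun-tasks --readme=specification/agrifood/resource-manager/readme.md --sdk=js,java
```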

**Attention**: rerun-tasks may discard your manual changes, depending on whether `clear-output-folder: true` is set in the `readme.<language>.md`. Also, if your manual code is in a file with the same name as a generated one, it will be overwritten.

### RUN DOCKER CONTAINER TO DO GROW UP DEVELOPMENT
There are two scenarios here:
1. The service team has generated code locally using the docker image and has exited the docker container, but wants to do grow up development now.
2. The service team has generated code with the sdk generation pipeline, which creates a work branch, and wants to do grow up development based on that work branch.

Compared to scenario 1, scenario 2 requires the user to clone and check out the work branch themselves. It's very simple with git (see the example after the table):
```shell
cd {local_work_folder}
git clone -b {work_branch} {repo_url}
```
Parameter description:

| Parameter | Description | Example |
|---|---|---|
| {local_work_folder} | Required. Points to the work folder, which stores all sdk repositories. | /home/test/work-dir |
| {work_branch} | Required. The name of the work branch generated by the SDK generation pipeline. | sdkAuto/workbranch |
| {repo_url} | Required. The url of the repository that contains the work branch. | https://github.com/Azure/azure-sdk-for-js.git |
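For example, using the sample values from the table (the branch name and URL are illustrative):

```shell
cd /home/test/work-dir
git clone -b sdkAuto/workbranch https://github.com/Azure/azure-sdk-for-js.git
```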

Then run the docker command to do grow up development:
```shell
docker run -it --privileged -v {local_spec_repo_path}:/spec-repo -v {local_work_folder}:/work-dir docker.image:latest --readme={relative_readme}
```
Parameter description:

| Parameter | Description | Example |
|---|---|---|
| {local_spec_repo_path} | Optional. If you want to change the swagger and re-generate code, mount the swagger repo. If you only want to do grow up development, there is no need to mount it. | /home/test/azure-rest-api-specs |
| {local_work_folder} | Required. Points to the work folder, which stores all sdk repositories. | /home/test/work-dir |
| {relative_readme} | Optional. Specifies the readme.md file, which the docker image uses to start the mock server. It is the relative path from {local_spec_repo_path}. If not specified, the mock server will not start. | specification/agrifood/resource-manager/readme.md |

Example Command:
```shell
docker run -it --privileged -v /home/test/azure-rest-api-specs:/spec-repo -v /home/test/work-dir:/work-dir docker.image:latest
```

After running the command, the container does not exit, and you can open your browser and go to `http://127.0.0.1:8080/?folder=/work-dir` for further grow up development.
If you want to re-generate code after grow up development or after changing the swagger, run the following command inside the docker container:
```shell
rerun-tasks --readme={relative_readme} --sdk={sdk_to_generate}
```
rerun-tasks is a script that invokes the task engine to re-run the tasks.

**Attention**: rerun-tasks may discard your manual changes, depending on whether `clear-output-folder: true` is set in the `readme.<language>.md`. Also, if your manual code is in a file with the same name as a generated one, it will be overwritten.

## RUN DOCKER CONTAINER IN PIPELINE
The docker image can also be used by the SDK Generation Pipeline. Moreover, if a service team wants to integrate the docker image into their CI pipeline, the method of integration is the same.

Before running the docker command, the pipeline must prepare the spec repo and the sdk repo.

Command:

```shell
docker run --privileged -v {spec_repo_path}:/spec-repo -v {sdk_repo_path}:/sdk-repo -v {output_folder_path}:/tmp/output docker.image:latest --readme={relative_readme}
```

Parameter description:

| Parameter | Description | Example |
|---|---|---|
| {spec_repo_path} | Required. Points to the swagger folder. | /home/test/azure-rest-api-specs |
| {sdk_repo_path} | Required. Points to the sdk repository. | /home/test/sdk-repos |
| {relative_readme} | Required. Specifies the readme.md file, which the docker image uses to generate SDKs. It is the relative path from {spec_repo_path}. | specification/agrifood/resource-manager/readme.md |

Example Command:
```shell
docker run -v /var/run/docker.sock:/var/run/docker.sock -v /home/vsts/work/azure-rest-api-specs:/spec-repo -v /home/vsts/work/azure-sdk-for-js:/sdk-repo -v /home/vsts/work/output:/tmp/output docker.image:latest --readme=specification/agrifood/resource-manager/readme.md
```

After running the command in the pipeline, docker executes the tasks automatically. It also generates output files, which are used by other pipeline jobs, such as uploading code and parsing logs.
The following is the full list of generated files (an illustrative listing follows the table):

| File Type | File | Description | Schema/Example |
|---|---|---|---|
| Logs | init-task.log | All logs produced while executing the init task. | 2022-03-24 03:35:12 xxxxxxx |
| Logs | generateAndBuild-task.log | All logs produced while executing the generate and build task. | 2022-03-24 03:35:12 xxxxxxx |
| Logs | mockTest-task.log | All logs produced while executing the mock test task. | 2022-03-24 03:35:12 xxxxxxx |
| Outputs | initOutput.json | The output of the init task. | [InitOutputSchema.json](../task-engine/schema/InitOutputSchema.json) |
| Outputs | generateAndBuildOutput.json | The output of the generateAndBuild script, such as the paths to the generated code and artifacts. | [GenerateAndBuildOutputSchema.json](../task-engine/schema/GenerateAndBuildOutputSchema.json) |
| Outputs | mockTestOutput.json | The output of the mock test task. | [TestOutputSchema.json](../task-engine/schema/TestOutputSchema.json) |
| Outputs | taskResults.json | The execution result of each task. | { "init": "success" } |
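As an illustration, after a successful run the mounted output folder from the example command above could contain something like:

```shell
$ ls /home/vsts/work/output
generateAndBuild-task.log  generateAndBuildOutput.json  init-task.log
initOutput.json            mockTest-task.log            mockTestOutput.json
taskResults.json
```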

@@ -0,0 +1,16 @@
# ARCHITECTURE DIAGRAM
![docker design](images/docker-design.drawio.png)

The core of the docker image is the task engine, which contains four tasks: Init Task, Generate and Build Task, Mock Test Task, and Live Test Task. There is a configuration file in each sdk repository that defines which tasks should be executed. To serve different users/pipelines, we provide different docker commands. Also, after the tasks are executed, there are outputs, such as the generated code and the task execution results, which can be used by the following steps in the pipeline or by the service team.

# TASK ENGINE
There are four main tasks defined in the task engine: Init Task, Generate and Build Task, Mock Test Task, and Live Test Task. The task engine executes these tasks based on a configuration file in the sdk repository; you can find [the schema of the configuration file here](../task-engine/schema/CodegenToSdkConfigSchema.json), and [the example here](../task-engine/README.md).

As the docker image will be used in different scenarios, we extract the most common parts into the docker image and leave the scenario-specific parts out.

For the schemas of the input/output of each task, please refer to [schemas](../task-engine/schema).

# DOCKER IMAGE LAYERS
The docker image is based on Ubuntu, and it also contains the development environments for the different sdk languages. The overall structure of the layers is the following:

![layer](images/docker-image-layers.drawio.png)
Binary file added: tools/sdk-generation-pipeline/documents/docker/images/docker-design.drawio.png (70 KiB, not shown)
Binary file added: tools/sdk-generation-pipeline/documents/docker/images/docker-image-layers.drawio.png (22 KiB, not shown)
Binary file added: tools/sdk-generation-pipeline/documents/docker/images/extension-remote-containers.png (20 KiB, not shown)

@@ -0,0 +1,27 @@
# Use Vscode to Connect Docker Container
It's not easy to develop inside a docker container. However, vscode provides a way to connect to a docker container, and you can use it to write code easily.

## Prerequisites
- Install vscode on your computer.
- Install the extension **Remote - Containers**.
  ![vscode connects docker containers](images/extension-remote-containers.png)

## Steps
Follow these steps to connect your vscode to the docker container.
1. Press `F1` and select `Remote-Containers: Attach to Running Container`.
2. Select your running docker image and attach to it.
3. After vscode connects to the docker container, open the folder `/work-dir/{sdk-repository}`.
    1. For .Net, you can only open the generated SDK namespace folder, such as `Azure.Verticals.AgriFood.Farming`.

Then you can write your code in vscode.

## FAQ
1. The vscode C# extension cannot load the project correctly.

    Answer: The vscode C# extension is based on OmniSharp, which can sometimes be confusing. To resolve it:
    1. Run `dotnet build` to rebuild the project.
    2. In vscode, press `ctrl + shift + p` and then type `Restart Omnisharp`.

2. Vscode cannot load the java project correctly.

    Answer: The java repository is very large and the vscode extension for Java needs a lot of time to load the project. You can check the progress in the `Java Build Status` terminal.

@@ -16,9 +16,6 @@ SDK Automation is launched in azure pipeline. It runs tasks in the following steps:
 4. Launch __mockTestTask__ to run the mock test with [mockTestInput.json](#mocktestinput). The script should produce [mockTestOutput.json](#mocktestoutput). Then the [mockTestOutput.json](#mocktestoutput) will be parsed and the test result will be stored in the database.

-5. Launch __liveTestTask__ to run the live test with [liveTestInput.json](#livetestinput). The script should produce [liveTestOutput.json](#livetestoutput). Then the [liveTestOutput.json](#livetestoutput) will be parsed and the test result will be stored in the database.

 ## Definitions

 ### CodegenToSdkConfig

@@ -72,15 +72,10 @@
       // Script path related to repo root
       "type": "string"
     },
-    "script": {
-      // the script type, e.g. pwsh
-      "type": "string"
-    },
     "envs": {
+      // Not Implemented
       // Extra environment variable to be passed to the script (except initScript).
       // By default the following envs will be passed:
-      // PATH, SHELL, PWD (current directory)
+      // PWD (current directory)
       "type": "array",
       "items": {
         "type": "string"

@@ -94,26 +89,6 @@
     "logFilter": {
       // filter for error msg and warning msg.
       "$ref": "#/definitions/LogFilter"
     },
-    "exitWithNonZeroCode": {
-      "properties": {
-        // How should SDK Automation handle non-zero exitCode.
-        "storeLog": {
-          // Should we store this error.
-          "type": "boolean",
-          "default": true
-        },
-        "result": {
-          // If script has non-error exitCode how should we mark the script's result.
-          "type": "string",
-          "enum": ["error", "warning", "ignore"],
-          "default": "error"
-        }
-      },
-      "storeAllLog": {
-        "show": true,
-        "result": "error"
-      }
-    }
   },
   "required": ["path"]

@@ -2,7 +2,7 @@
   "type": "object",
   "properties": {
     "specFolder": {
-      // Path to local spec folder. Path to the parent of resourceProvider folders. e.g. azure-rest-api-specs/specifications
+      // Path to local spec folder. Path to the parent of service folders. e.g. azure-rest-api-specs/specifications
       "type": "string"
     },
     "headSha": {

@@ -1,69 +1,69 @@
{
    "type": "object",
    "properties": {
        "packages": {
            "type": "array",
            "items": {
                "$ref": "#/definitions/PackageResult"
            }
        }
    },
    "required": ["packages"],
    "definitions": {
        "PackageResult": {
            "properties": {
                "packageName": {
                    // Name of the package. It will be used in the branch name, the PR title and the folder name that stores the generated code.
                    "type": "string"
                },
                "result": {
                    // Status of the package. By default it's succeeded.
                    "type": "string",
-                   "enum": ["failed", "succeeded", "warning"],
+                   "enum": ["failed", "succeeded"],
                    "default": "succeeded"
                },
                "path": {
                    // List of package content paths.
                    // If a path points to a folder then
                    // all the content under the folder will be included.
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "packageFolder": {
                    // The path of the package folder.
                    "type": "string"
                },
                "changelog": {
                    "type": "object",
                    "properties": {
                        "content": {
                            // Content of the changelog in markdown
                            "type": "string"
                        },
                        "hasBreakingChange": {
                            // Does the new package have breaking changes
                            "type": "boolean"
                        },
                        "breakingChangeItems": {
                            "type": "array",
                            "items": {
                                "type": "string"
                            }
                        }
                    },
                    "required": ["content"]
                },
                "artifacts": {
                    // The paths to the artifacts
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                }
            },
            "required": ["packageName", "path", "packageFolder"]
        }
    }
}

@@ -17,6 +17,5 @@
     "codeCoverage": {
       "type": "number"
     }
   },
   "required": ["total", "success", "fail", "apiCoverage", "codeCoverage"]
 }
}

@@ -0,0 +1,79 @@
# @azure-tools/sdk-generation-cli

This package includes some commands used by the sdk generation pipeline.

## Install

```shell
npm i @azure-tools/sdk-generation-cli
```

## Commands

### docker-cli
It's used by the docker image. For details, please refer to [How to Use Docker Image for SDK Generation](../../documents/docker/README.md).

### run-mock-host
Run this command to start the mock host.
Usage:
```shell
run-mock-host --readme=<path-to-readme> --spec-repo=<path-to-spec-repo> --mock-host-path=<mock-host-install-path>
```
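For example, inside the docker container described above, where the spec repo is mounted at `/spec-repo` and the mock service host is installed under `/mock-host`, an invocation might look like this (the readme path is illustrative):

```shell
run-mock-host --readme=specification/agrifood/resource-manager/readme.md --spec-repo=/spec-repo --mock-host-path=/mock-host
```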

For more details, please refer to the [mock service host document](https://github.com/Azure/azure-sdk-tools/tree/main/tools/mock-service-host).

### getRepoName
Get the repository name from its http url and set it as an azure pipeline variable.
Usage:
```shell
getRepoName <variable-key> <repo-http-url>
```
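For example, the following would set the pipeline variable (the variable name `repoName` is illustrative) to `azure-sdk-for-js`:

```shell
getRepoName repoName https://github.com/Azure/azure-sdk-for-js.git
```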

### generateResult
Parse the logs produced by the tasks and generate a summarized task result in json format.
Usage:
```shell
generateResult \
    --buildId=<build-id> \
    --taskName=<task-name> \
    --logfile=<task-log-path> \
    --resultOutputPath=<path-to-generate-result-file> \
    [--dockerResultFile=<all-tasks-result-path>] \
    [--exeResult=<tasks-result-status>] \
    [--taskOutputPath=<addition-object-path>] \
    [--logFilterStr=<specify-filter-for-log>]
```
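For example, to summarize the init task's log from a docker run into a result file (the build id and paths are illustrative):

```shell
generateResult \
    --buildId=1234 \
    --taskName=init \
    --logfile=/tmp/output/init-task.log \
    --resultOutputPath=/tmp/output/init-task-result.json
```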

### publishResult
Publish the pipeline result to storage. [eventhub] is supported.
NOTE: the eventhub connection string is read from the environment variable [EVENTHUB_SAS_URL].
Usage:
```shell
publishResult \
    --storageType=eventhub \
    --pipelineStatus=<status> \
    --buildId=<build-id> \
    --trigger=<pipeline-trigger> \
    --logPath=<log-path-of-full-log> \
    --resultsPath=<task-result-path-arr>
```
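Because the connection string is read from the environment, a pipeline step typically exports it first, for example (all values are illustrative):

```shell
export EVENTHUB_SAS_URL="Endpoint=sb://example.servicebus.windows.net/;SharedAccessKeyName=..."
publishResult \
    --storageType=eventhub \
    --pipelineStatus=completed \
    --buildId=1234 \
    --trigger=pullRequest \
    --logPath=/tmp/output/docker.log \
    --resultsPath=/tmp/output/init-task-result.json
```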

### uploadArtifact
Upload the artifact to blob storage.
NOTE: the blob connection string is read from the environment variable [AZURE_STORAGE_BLOB_SAS_URL].
Usage:
```shell
uploadArtifact \
    --generateAndBuildOutputFile=<generateAndBuildOutput-file-path> \
    --buildId=<build-id> \
    --language=<build-language>
```
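uploadArtifact reads its connection string from the environment in the same way, for example (all values are illustrative):

```shell
export AZURE_STORAGE_BLOB_SAS_URL="https://example.blob.core.windows.net/container?sv=..."
uploadArtifact \
    --generateAndBuildOutputFile=/tmp/output/generateAndBuildOutput.json \
    --buildId=1234 \
    --language=js
```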

### prepareArtifactFiles
Determine which files to upload and copy them to the artifact directory.
Usage:
```shell
prepareArtifactFiles \
    --artifactDir=<artifact-directory> \
    --generateAndBuildOutputFile=<generateAndBuildOutput-file-path> \
    --language=<build-language>
```

@@ -13,7 +13,7 @@ module.exports = {
        "<rootDir>/**/*.test.ts",
    ],
    modulePathIgnorePatterns: [
-        "<rootDir>/tmp/*"
+        "<rootDir>(/.*)*/tmp/*"
    ],
    collectCoverage: true,
    collectCoverageFrom: [

@@ -1,54 +1,69 @@
 {
-    "name": "@azure-tools/sdk-generation-cli",
-    "version": "1.0.0",
-    "description": "",
-    "main": "dist/index.js",
-    "types": "dist/index.d.ts",
-    "scripts": {
-        "build": "rimraf dist && tsc",
-        "prepack": "npm run build",
-        "pack": "npm pack 2>&1",
-        "test:prepare-environment": "ts-node test/utils/prepareEnvironment.ts",
-        "test": "npm run test:prepare-environment && jest "
-    },
-    "author": "Microsoft",
-    "license": "MIT",
-    "files": [
-        "dist/**/*.js",
-        "dist/**/*.json",
-        "dist/**/*.js.map",
-        "dist/**/*.d.ts",
-        "dist/**/*.d.ts.map",
-        "dist/**/*.handlebars",
-        "LICENSE",
-        "README.md"
-    ],
-    "bin": {
-        "getRepoName": "dist/getRepoNameCli.js",
-        "generateResult": "dist/generateResultCli.js",
-        "publishResult": "dist/publishResultCli.js",
-        "uploadArtifact": "dist/uploadArtifactCli.js",
-        "prepareArtifactFiles": "dist/prepareArtifactFilesCli.js"
-    },
-    "dependencies": {
-        "@azure/storage-blob": "^12.8.0",
-        "@azure-tools/sdk-generation-lib": "^1.0.4",
-        "ajv": "^6.12.6",
-        "axios": "^0.24.0",
-        "convict": "^6.2.3",
-        "mongodb": "^3.6.10",
-        "typeorm": "^0.2.37"
-    },
-    "bundledDependencies": [
-        "@azure-tools/sdk-generation-lib"
-    ],
-    "devDependencies": {
-        "@types/jest": "^25.2.1",
-        "@types/node": "^16.11.7",
-        "jest": "~26.6.3",
-        "rimraf": "^3.0.2",
-        "ts-jest": "~26.5.4",
-        "ts-node": "~10.7.0",
-        "typescript": "~4.6.3"
-    }
+    "name": "@azure-tools/sdk-generation-cli",
+    "version": "1.0.0",
+    "description": "",
+    "main": "dist/index.js",
+    "types": "dist/index.d.ts",
+    "scripts": {
+        "build": "rimraf dist && tsc",
+        "prepack": "npm run build",
+        "pack": "npm pack 2>&1",
+        "test:unit-prepare-environment": "ts-node test/unit/utils/prepareEnvironment.ts",
+        "test:unit": "npm run test:unit-prepare-environment && jest",
+        "test:integration": "ts-node test/integration/integrationTest.ts",
+        "test": "npm run test:unit && npm run test:integration",
+        "lint": "eslint . -c ../../.eslintrc.json --ignore-path ../../.eslintignore --ext .ts",
+        "lint:fix": "eslint . -c ../../.eslintrc.json --ignore-path ../../.eslintignore --ext .ts --fix"
+    },
+    "author": "Microsoft",
+    "license": "MIT",
+    "files": [
+        "dist/**/*.js",
+        "dist/**/*.json",
+        "dist/**/*.js.map",
+        "dist/**/*.d.ts",
+        "dist/**/*.d.ts.map",
+        "dist/**/*.handlebars",
+        "LICENSE",
+        "README.md"
+    ],
+    "bin": {
+        "getRepoName": "dist/cli/pipelineCli/getRepoNameCli.js",
+        "generateResult": "dist/cli/pipelineCli/generateResultCli.js",
+        "publishResult": "dist/cli/pipelineCli/publishResultCli.js",
+        "uploadArtifact": "dist/cli/pipelineCli/uploadArtifactCli.js",
+        "prepareArtifactFiles": "dist/cli/pipelineCli/prepareArtifactFilesCli.js",
+        "docker-cli": "dist/cli/dockerCli/dockerCli.js",
+        "run-mock-host": "dist/cli/dockerCli/runMockHostCli.js"
+    },
+    "dependencies": {
+        "@azure/storage-blob": "^12.8.0",
+        "ajv": "^6.12.6",
+        "convict": "^6.2.3",
+        "@azure-tools/sdk-generation-lib": "^1.0.0",
+        "axios": "^0.24.0",
+        "dotenv": "^16.0.0",
+        "winston": "~3.7.2",
+        "command-line-args": "~5.2.1"
+    },
+    "bundledDependencies": [
+        "@azure-tools/sdk-generation-lib"
+    ],
+    "devDependencies": {
+        "@types/node": "^16.11.7",
+        "rimraf": "^3.0.2",
+        "jest": "~26.6.3",
+        "ts-jest": "~26.5.4",
+        "@types/jest": "^25.2.1",
+        "typescript": "~4.6.3",
+        "ts-node": "~10.7.0",
+        "eslint": "^8.16.0",
+        "@typescript-eslint/eslint-plugin": "^5.25.0",
+        "eslint-config-google": "^0.14.0",
+        "eslint-plugin-import": "^2.26.0",
+        "eslint-plugin-n": "^15.0.0",
+        "eslint-plugin-promise": "^6.0.0",
+        "@typescript-eslint/parser": "^5.25.0",
+        "eslint-plugin-simple-import-sort": "^7.0.0"
+    }
 }

@@ -0,0 +1,104 @@
import { initializeLogger } from '@azure-tools/sdk-generation-lib';
import fs from 'fs';
import path from 'path';
import { Logger } from 'winston';

import { DockerCliInput } from '../schema/dockerCliInput';
import { sdkToRepoMap } from './constants';
import { DockerRunningModel } from './DockerRunningModel';

export class DockerContext {
    mode: DockerRunningModel;
    readmeMdPath?: string;
    tag?: string;
    sdkList: string[];
    specRepo?: string;
    workDir?: string;
    sdkRepo?: string;
    resultOutputFolder?: string;
    logger: Logger;

    /*
     * There are different modes to use the docker image:
     * 1. local: generate codes
     * 2. local: grow up
     * 3. pipeline: generate codes
     * */
    public initialize(inputParams: DockerCliInput) {
        this.readmeMdPath = inputParams.readmeMdPath;
        this.tag = inputParams.tag;
        this.sdkList = inputParams.sdkList?.split(',').map((e) => e.trim()).filter((e) => e.length > 0);
        this.specRepo = inputParams.specRepo;
        this.workDir = inputParams.workDir;
        this.sdkRepo = inputParams.sdkRepo;
        this.resultOutputFolder = inputParams.resultOutputFolder;

        this.logger = initializeLogger(path.join(inputParams.resultOutputFolder, inputParams.dockerLogger), 'docker');

        if (this.sdkList?.length === 0 && fs.existsSync(this.workDir)) {
            this.logger.info('Preparing environment to do grow up development');
            this.mode = DockerRunningModel.GrowUp;
            this.validateSpecRepo();
            this.validateWorkDir();
        } else if (fs.existsSync(this.workDir)) {
            this.logger.info('Preparing environment to generate codes and do grow up development in local');
            this.mode = DockerRunningModel.CodeGenAndGrowUp;
            this.validateSpecRepo();
            this.validateReadmeMdPath();
            this.validateSdk();
        } else {
            this.logger.info('Preparing environment to generate codes in pipeline');
            this.mode = DockerRunningModel.Pipeline;
            this.validateSdkRepo();
            this.validateSpecRepo();
            this.validateReadmeMdPath();
            this.validateOutputFolder();
        }
    }

    private validateSpecRepo() {
        if (!fs.existsSync(this.specRepo)) {
            throw new Error(`Cannot find ${this.specRepo}, please mount it to docker container`);
        }
    }

    private validateReadmeMdPath() {
        if (!this.readmeMdPath) {
            throw new Error(`Get empty readme.md path, please input it with --readme`);
        }
        if (!fs.existsSync(path.join(this.specRepo, this.readmeMdPath))) {
            throw new Error(`Cannot find file ${this.readmeMdPath}, please input a valid one`);
        }
    }

    private validateSdk() {
        const supportedSdk = Object.keys(sdkToRepoMap);
        const unSupportedSdk: string[] = [];
        for (const sdk of this.sdkList) {
            if (!supportedSdk.includes(sdk)) {
                unSupportedSdk.push(sdk);
            }
        }
        if (unSupportedSdk.length > 0) {
            throw new Error(`Docker container doesn't support the following sdks: ${unSupportedSdk.join(', ')}`);
        }
    }

    private validateWorkDir() {
        if (!fs.existsSync(this.workDir)) {
            throw new Error(`Cannot find ${this.workDir}, please mount it to docker container`);
        }
    }

    private validateSdkRepo() {
        if (!fs.existsSync(this.sdkRepo)) {
            throw new Error(`Cannot find ${this.sdkRepo}, please mount it to docker container`);
        }
    }

    private validateOutputFolder() {
        if (!fs.existsSync(this.resultOutputFolder)) {
            throw new Error(`Cannot find ${this.resultOutputFolder}, please mount it to docker container`);
        }
    }
}

@@ -0,0 +1,5 @@
export enum DockerRunningModel {
    CodeGenAndGrowUp,
    GrowUp,
    Pipeline
}

@@ -0,0 +1,148 @@
import { CodegenToSdkConfig, getCodegenToSdkConfig, requireJsonc, StringMap } from '@azure-tools/sdk-generation-lib';
import { execSync } from 'child_process';
import * as fs from 'fs';
import { writeFileSync } from 'fs';
import * as path from 'path';
import { Logger } from 'winston';

import { disableFileMode, getHeadRef, getHeadSha, safeDirectory } from '../../../utils/git';
import { dockerTaskEngineInput } from '../schema/dockerTaskEngineInput';
import { DockerContext } from './DockerContext';
import { DockerRunningModel } from './DockerRunningModel';
import { GenerateAndBuildTask } from './tasks/GenerateAndBuildTask';
import { InitTask } from './tasks/InitTask';
import { MockTestTask } from './tasks/MockTestTask';
import { SDKGenerationTaskBase } from './tasks/SDKGenerationTaskBase';

export class DockerTaskEngineContext {
    logger: Logger;
    configFilePath: string;
    initOutputJsonFile: string;
    generateAndBuildInputJsonFile: string;
    generateAndBuildOutputJsonFile: string;
    mockTestInputJsonFile: string;
    mockTestOutputJsonFile: string;
    initTaskLog: string;
    generateAndBuildTaskLog: string;
    mockTestTaskLog: string;
    readmeMdPath: string;
    specRepo: {
        repoPath: string;
        headSha: string;
        headRef: string;
        repoHttpsUrl: string;
    };
    serviceType?: string;
    tag?: string;
    sdkRepo: string;
    resultOutputFolder?: string;
    envs?: StringMap<string | boolean | number>;
    packageFolders?: string[];
    mockServerHost?: string;
    taskResults?: {};
    taskResultJsonPath: string;
    changeOwner: boolean;
    mode: DockerRunningModel;

    public initialize(dockerContext: DockerContext) {
        // Before executing the task engine, mark the spec and sdk repos as safe git directories because they may be owned by other users.
        safeDirectory(dockerContext.specRepo);
        safeDirectory(dockerContext.sdkRepo);
        const dockerTaskEngineConfigProperties = dockerTaskEngineInput.getProperties();
        this.logger = dockerContext.logger;
        this.configFilePath = dockerTaskEngineConfigProperties.configFilePath;
        this.initOutputJsonFile = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.initOutputJsonFile);
        this.generateAndBuildInputJsonFile = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.generateAndBuildInputJsonFile);
        this.generateAndBuildOutputJsonFile = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.generateAndBuildOutputJsonFile);
        this.mockTestInputJsonFile = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.mockTestInputJsonFile);
        this.mockTestOutputJsonFile = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.mockTestOutputJsonFile);
        this.initTaskLog = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.initTaskLog);
        this.generateAndBuildTaskLog = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.generateAndBuildTaskLog);
        this.mockTestTaskLog = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.mockTestTaskLog);
        this.readmeMdPath = dockerContext.readmeMdPath;
        this.specRepo = {
            repoPath: dockerContext.specRepo,
            // Use the configured headSha if present; otherwise derive it from the repo (pipeline mode) or leave a placeholder (local modes).
            headSha: dockerTaskEngineConfigProperties.headSha ?? (dockerContext.mode === DockerRunningModel.Pipeline ?
                getHeadSha(dockerContext.specRepo) : '{commit_id}'),
            headRef: dockerTaskEngineConfigProperties.headRef ?? getHeadRef(dockerContext.specRepo),
            repoHttpsUrl: dockerTaskEngineConfigProperties.repoHttpsUrl
        };
        this.serviceType = dockerContext.readmeMdPath.includes('data-plane') && dockerTaskEngineConfigProperties.serviceType ? 'data-plane' : 'resource-manager';
        this.tag = dockerContext.tag;
        this.sdkRepo = dockerContext.sdkRepo;
        this.resultOutputFolder = dockerContext.resultOutputFolder ?? '/tmp/output';
        this.mockServerHost = dockerTaskEngineConfigProperties.mockServerHost;
        this.taskResultJsonPath = path.join(dockerContext.resultOutputFolder, dockerTaskEngineConfigProperties.taskResultJson);
        this.changeOwner = dockerTaskEngineConfigProperties.changeOwner;
        this.mode = dockerContext.mode;
    }

    public async beforeRunTaskEngine() {
        if (!!this.resultOutputFolder && !fs.existsSync(this.resultOutputFolder)) {
            fs.mkdirSync(this.resultOutputFolder, { recursive: true });
        }
        this.logger.info(`Start to run task engine in ${path.basename(this.sdkRepo)}`);
    }

    public async afterRunTaskEngine() {
        if (this.changeOwner && !!this.specRepo?.repoPath && !!fs.existsSync(this.specRepo.repoPath)) {
            const userGroupId = (execSync(`stat -c "%u:%g" ${this.specRepo.repoPath}`, { encoding: 'utf8' })).trim();
            if (!!this.resultOutputFolder && fs.existsSync(this.resultOutputFolder)) {
                execSync(`chown -R ${userGroupId} ${this.specRepo.repoPath}`);
            }
            if (!!this.sdkRepo && fs.existsSync(this.sdkRepo)) {
                execSync(`chown -R ${userGroupId} ${this.sdkRepo}`, { encoding: 'utf8' });
                disableFileMode(this.sdkRepo);
            }
        }
        if (!!this.taskResults) {
            writeFileSync(this.taskResultJsonPath, JSON.stringify(this.taskResults, undefined, 2), 'utf-8');
        }
        this.logger.info(`Finish running task engine in ${path.basename(this.sdkRepo)}`);
    }

    public async getTaskToRun(): Promise<SDKGenerationTaskBase[]> {
        const codegenToSdkConfig: CodegenToSdkConfig = getCodegenToSdkConfig(requireJsonc(path.join(this.sdkRepo, this.configFilePath)));
        this.logger.info(`Get codegen_to_sdk_config.json`);
        this.logger.info(JSON.stringify(codegenToSdkConfig, undefined, 2));
        const tasksToRun: SDKGenerationTaskBase[] = [];
        for (const taskName of Object.keys(codegenToSdkConfig)) {
            let task: SDKGenerationTaskBase;
            switch (taskName) {
            case 'init':
                task = new InitTask(this);
                break;
            case 'generateAndBuild':
                task = new GenerateAndBuildTask(this);
                break;
            case 'mockTest':
                task = new MockTestTask(this);
                break;
            }

            if (!!task) {
                tasksToRun.push(task);
                if (!this.taskResults) {
                    this.taskResults = {};
                }
                this.taskResults[taskName] = 'skipped';
            }
        }
        tasksToRun.sort((a, b) => a.order - b.order);
        this.logger.info(`Get tasks to run: ${tasksToRun.map((task) => task.taskType).join(',')}`);
        return tasksToRun;
    }

    public async runTaskEngine() {
        await this.beforeRunTaskEngine();
        try {
            const tasksToRun: SDKGenerationTaskBase[] = await this.getTaskToRun();
            for (const task of tasksToRun) {
                await task.execute();
            }
        } finally {
            await this.afterRunTaskEngine();
        }
    }
}

@@ -0,0 +1,7 @@
export const sdkToRepoMap = {
    'js': 'azure-sdk-for-js',
    'python': 'azure-sdk-for-python',
    'go': 'azure-sdk-for-go',
    'java': 'azure-sdk-for-java',
    '.net': 'azure-sdk-for-net'
};

@@ -0,0 +1,8 @@
import * as fs from 'fs';

export class BaseJob {
    public doNotExitDockerContainer() {
        // This file is checked by entrypoint.sh to determine whether to exit the docker container.
        fs.writeFileSync('/tmp/notExit', 'yes', 'utf-8');
    }
}
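The `/tmp/notExit` marker is what keeps the container alive for grow up development. The actual entrypoint.sh is not part of this diff; a hypothetical sketch of the check it performs could look like this:

```shell
# hypothetical sketch, not the actual entrypoint.sh:
# keep the container running when a job wrote the marker file
if [ -f /tmp/notExit ]; then
    sleep infinity
fi
```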

@@ -0,0 +1,57 @@
import { existsSync } from 'fs';
import * as path from 'path';

import { cloneRepo, getChangedPackageDirectory } from '../../../../utils/git';
import { sdkToRepoMap } from '../constants';
import { DockerContext } from '../DockerContext';
import { DockerTaskEngineContext } from '../DockerTaskEngineContext';
import { BaseJob } from './BaseJob';

export class GenerateCodesInLocalJob extends BaseJob {
    context: DockerContext;

    constructor(context: DockerContext) {
        super();
        this.context = context;
    }

    public async cloneRepoIfNotExist(sdkRepos: string[]) {
        for (const sdkRepo of sdkRepos) {
            if (!existsSync(path.join(this.context.workDir, sdkRepo))) {
                await cloneRepo(sdkRepo, this.context.workDir, this.context.logger);
            }
            this.context.sdkRepo = path.join(this.context.workDir, sdkRepo);
        }
    }

    public async execute() {
        const sdkRepos: string[] = this.context.sdkList.map((ele) => sdkToRepoMap[ele]);
        await this.cloneRepoIfNotExist(sdkRepos);
        for (const sdk of this.context.sdkList) {
            this.context.sdkRepo = path.join(this.context.workDir, sdkToRepoMap[sdk]);
            const dockerTaskEngineContext = new DockerTaskEngineContext();
            dockerTaskEngineContext.initialize(this.context);
            await dockerTaskEngineContext.runTaskEngine();
        }

        const generatedCodesPath: Map<string, Set<string>> = new Map();

        for (const sdk of this.context.sdkList) {
            generatedCodesPath[sdk] = await getChangedPackageDirectory(path.join(this.context.workDir, sdkToRepoMap[sdk]));
        }

        this.context.logger.info(`Finish generating sdk for ${this.context.sdkList.join(', ')}.`);
        for (const sdk of this.context.sdkList) {
            if (generatedCodesPath[sdk].size > 0) {
                this.context.logger.info(`You can find changed files of ${sdk} in:`);
                generatedCodesPath[sdk].forEach((ele) => {
                    this.context.logger.info(`    - ${path.join(this.context.workDir, sdkToRepoMap[sdk], ele)}`);
                });
            } else {
                this.context.logger.info(`Cannot find changed files of ${sdk} because there is no git diff.`);
            }
        }
        this.context.logger.info(`You can use vscode to connect this docker container for further development.`);
        this.doNotExitDockerContainer();
    }
}

@@ -0,0 +1,18 @@
import { DockerContext } from '../DockerContext';
import { DockerTaskEngineContext } from '../DockerTaskEngineContext';
import { BaseJob } from './BaseJob';

export class GenerateCodesInPipelineJob extends BaseJob {
    context: DockerContext;

    constructor(context: DockerContext) {
        super();
        this.context = context;
    }

    public async execute() {
        const context: DockerTaskEngineContext = new DockerTaskEngineContext();
        context.initialize(this.context);
        await context.runTaskEngine();
    }
}

@@ -0,0 +1,16 @@
import { DockerContext } from '../DockerContext';
import { BaseJob } from './BaseJob';

export class GrowUpJob extends BaseJob {
    context: DockerContext;

    constructor(context: DockerContext) {
        super();
        this.context = context;
    }

    public async execute() {
        this.context.logger.info(`Please use vscode to connect this container.`);
        this.doNotExitDockerContainer();
    }
}

@@ -0,0 +1,71 @@
import {
    addFileLog, GenerateAndBuildInput,
    GenerateAndBuildOptions,
    getGenerateAndBuildOutput,
    getTask, removeFileLog, requireJsonc,
    runScript
} from '@azure-tools/sdk-generation-lib';
import fs from 'fs';
import path from 'path';

import { DockerTaskEngineContext } from '../DockerTaskEngineContext';
import { SDKGenerationTaskBase, TaskType } from './SDKGenerationTaskBase';

export class GenerateAndBuildTask implements SDKGenerationTaskBase {
    taskType: TaskType;
    order: number;
    context: DockerTaskEngineContext;

    constructor(context: DockerTaskEngineContext) {
        this.taskType = 'GenerateAndBuildTask';
        this.order = 1;
        this.context = context;
    }

    public async execute() {
        const generateAndBuildTask = getTask(path.join(this.context.sdkRepo, this.context.configFilePath), 'generateAndBuild');
        if (!generateAndBuildTask) {
            throw new Error(`Generate and build task is ${generateAndBuildTask}`);
        }
        const generateAndBuildOptions = generateAndBuildTask as GenerateAndBuildOptions;
        const runOptions = generateAndBuildOptions.generateAndBuildScript;
        const readmeMdAbsolutePath = path.join(this.context.specRepo.repoPath, this.context.readmeMdPath);
        const specRepoPath = this.context.specRepo.repoPath.includes('specification') ?
            this.context.specRepo.repoPath : path.join(this.context.specRepo.repoPath, 'specification');
        const relatedReadmeMdFileRelativePath = path.relative(specRepoPath, readmeMdAbsolutePath);
        const inputContent: GenerateAndBuildInput = {
            specFolder: specRepoPath,
            headSha: this.context.specRepo.headSha,
            headRef: this.context.specRepo.headRef,
            repoHttpsUrl: this.context.specRepo.repoHttpsUrl,
            relatedReadmeMdFile: relatedReadmeMdFileRelativePath,
            serviceType: this.context.serviceType
        };
        const inputJson = JSON.stringify(inputContent, undefined, 2);
        this.context.logger.info(`Get ${path.basename(this.context.generateAndBuildInputJsonFile)}:`);
        this.context.logger.info(inputJson);
        fs.writeFileSync(this.context.generateAndBuildInputJsonFile, inputJson, { encoding: 'utf-8' });
        addFileLog(this.context.logger, this.context.generateAndBuildTaskLog, 'generateAndBuild');
        const executeResult = await runScript(runOptions, {
            cwd: path.resolve(this.context.sdkRepo),
            args: [this.context.generateAndBuildInputJsonFile, this.context.generateAndBuildOutputJsonFile],
            envs: this.context.envs,
            customizedLogger: this.context.logger
        });
        removeFileLog(this.context.logger, 'generateAndBuild');
        this.context.taskResults['generateAndBuild'] = executeResult;
        if (executeResult === 'failed') {
            throw new Error(`Execute generateAndBuild script failed.`);
        }
        if (fs.existsSync(this.context.generateAndBuildOutputJsonFile)) {
            const generateAndBuildOutputJson = getGenerateAndBuildOutput(requireJsonc(this.context.generateAndBuildOutputJsonFile));
            this.context.logger.info(`Get ${path.basename(this.context.generateAndBuildOutputJsonFile)}:`);
            this.context.logger.info(JSON.stringify(generateAndBuildOutputJson, undefined, 2));
            const packageFolders: string[] = [];
            for (const p of generateAndBuildOutputJson.packages) {
                packageFolders.push(p.packageFolder);
            }
            this.context.packageFolders = packageFolders;
        }
    }
}
@ -0,0 +1,55 @@
import {
    addFileLog,
    getTask,
    InitOptions,
    initOutput,
    removeFileLog,
    requireJsonc,
    runScript
} from '@azure-tools/sdk-generation-lib';
import fs from 'fs';
import path from 'path';

import { DockerTaskEngineContext } from '../DockerTaskEngineContext';
import { SDKGenerationTaskBase, TaskType } from './SDKGenerationTaskBase';

export class InitTask implements SDKGenerationTaskBase {
    taskType: TaskType;
    order: number;
    context: DockerTaskEngineContext;

    constructor(context: DockerTaskEngineContext) {
        this.taskType = 'InitTask';
        this.order = 0;
        this.context = context;
    }

    public async execute() {
        const initTask = getTask(path.join(this.context.sdkRepo, this.context.configFilePath), 'init');
        if (!initTask) {
            throw new Error(`Cannot find the init task in ${this.context.configFilePath}.`);
        }
        const initOptions = initTask as InitOptions;
        const runOptions = initOptions.initScript;
        addFileLog(this.context.logger, this.context.initTaskLog, 'init');
        const executeResult = await runScript(runOptions, {
            cwd: path.resolve(this.context.sdkRepo),
            args: [this.context.initOutputJsonFile],
            customizedLogger: this.context.logger
        });
        removeFileLog(this.context.logger, 'init');
        this.context.taskResults['init'] = executeResult;
        if (executeResult === 'failed') {
            throw new Error(`Execute init script failed.`);
        }
        if (fs.existsSync(this.context.initOutputJsonFile)) {
            const initOutputJson = initOutput(requireJsonc(this.context.initOutputJsonFile));
            this.context.logger.info(`Get ${path.basename(this.context.initOutputJsonFile)}:`);
            this.context.logger.info(JSON.stringify(initOutputJson, undefined, 2));

            if (initOutputJson?.envs) {
                this.context.envs = initOutputJson.envs;
            }
        }
    }
}
@ -0,0 +1,72 @@
import {
    addFileLog,
    getTask,
    getTestOutput,
    MockTestInput,
    MockTestOptions,
    removeFileLog,
    requireJsonc,
    runScript,
    TaskResultStatus
} from '@azure-tools/sdk-generation-lib';
import fs from 'fs';
import path from 'path';

import { DockerTaskEngineContext } from '../DockerTaskEngineContext';
import { SDKGenerationTaskBase, TaskType } from './SDKGenerationTaskBase';

export class MockTestTask implements SDKGenerationTaskBase {
    context: DockerTaskEngineContext;
    order: number;
    taskType: TaskType;

    constructor(context: DockerTaskEngineContext) {
        this.taskType = 'MockTestTask';
        this.order = 2;
        this.context = context;
    }

    public async execute() {
        const mockTestTask = getTask(path.join(this.context.sdkRepo, this.context.configFilePath), 'mockTest');
        if (!mockTestTask) {
            throw new Error(`Cannot find the mockTest task in ${this.context.configFilePath}.`);
        }
        const mockTestOptions = mockTestTask as MockTestOptions;
        const runOptions = mockTestOptions.mockTestScript;
        for (const packageFolder of this.context.packageFolders) {
            this.context.logger.info(`Run MockTest for ${packageFolder}`);

            const inputContent: MockTestInput = {
                packageFolder: path.join(this.context.sdkRepo, packageFolder),
                mockServerHost: this.context.mockServerHost
            };
            const inputJson = JSON.stringify(inputContent, undefined, 2);
            const formattedPackageName = packageFolder.replace(/[^a-zA-Z0-9]/g, '-');
            const mockTestInputJsonPath = this.context.packageFolders.length > 1 ?
                this.context.mockTestInputJsonFile.replace('.json', `${formattedPackageName}.json`) : this.context.mockTestInputJsonFile;
            const mockTestOutputJsonPath = this.context.packageFolders.length > 1 ?
                this.context.mockTestOutputJsonFile.replace('.json', `${formattedPackageName}.json`) : this.context.mockTestOutputJsonFile;
            const mockTestTaskLogPath = this.context.packageFolders.length > 1 ?
                this.context.mockTestTaskLog.replace('task.log', `${formattedPackageName}-task.log`) : this.context.mockTestTaskLog;
            fs.writeFileSync(mockTestInputJsonPath, inputJson, { encoding: 'utf-8' });
            this.context.logger.info(`Get ${path.basename(mockTestInputJsonPath)}:`);
            this.context.logger.info(inputJson);
            addFileLog(this.context.logger, mockTestTaskLogPath, `mockTest_${formattedPackageName}`);
            const executeResult = await runScript(runOptions, {
                cwd: path.resolve(this.context.sdkRepo),
                args: [mockTestInputJsonPath, mockTestOutputJsonPath],
                envs: this.context.envs,
                customizedLogger: this.context.logger
            });
            // The overall mockTest result only stays successful while every package succeeds.
            this.context.taskResults['mockTest'] = executeResult === TaskResultStatus.Success &&
                this.context.taskResults['mockTest'] !== TaskResultStatus.Failure ?
                TaskResultStatus.Success : TaskResultStatus.Failure;
            removeFileLog(this.context.logger, `mockTest_${formattedPackageName}`);
            if (fs.existsSync(mockTestOutputJsonPath)) {
                const mockTestOutputJson = getTestOutput(requireJsonc(mockTestOutputJsonPath));
                this.context.logger.info(`Get ${path.basename(mockTestOutputJsonPath)}:`);
                this.context.logger.info(JSON.stringify(mockTestOutputJson, undefined, 2));
            }
            if (this.context.taskResults['mockTest'] === TaskResultStatus.Failure) {
                throw new Error('Run Mock Test Failed');
            }
        }
    }
}
@ -0,0 +1,10 @@
import { DockerTaskEngineContext } from '../DockerTaskEngineContext';

export type TaskType = 'InitTask' | 'GenerateAndBuildTask' | 'MockTestTask';

export interface SDKGenerationTaskBase {
    taskType: TaskType;
    context: DockerTaskEngineContext;
    order: number;
    execute();
}
@ -0,0 +1,38 @@
#!/usr/bin/env node
import { DockerContext } from './core/DockerContext';
import { DockerRunningModel } from './core/DockerRunningModel';
import { GenerateCodesInLocalJob } from './core/jobs/GenerateCodesInLocalJob';
import { GenerateCodesInPipelineJob } from './core/jobs/GenerateCodesInPipelineJob';
import { GrowUpJob } from './core/jobs/GrowUpJob';
import { DockerCliInput, dockerCliInput } from './schema/dockerCliInput';

async function main() {
    const inputParams: DockerCliInput = dockerCliInput.getProperties();
    const context: DockerContext = new DockerContext();
    context.initialize(inputParams);

    let executeJob: GenerateCodesInLocalJob | GrowUpJob | GenerateCodesInPipelineJob;

    switch (context.mode) {
        case DockerRunningModel.CodeGenAndGrowUp:
            executeJob = new GenerateCodesInLocalJob(context);
            break;
        case DockerRunningModel.GrowUp:
            executeJob = new GrowUpJob(context);
            break;
        case DockerRunningModel.Pipeline:
            executeJob = new GenerateCodesInPipelineJob(context);
            break;
    }

    if (executeJob) {
        await executeJob.execute();
    }
}

main().catch((e) => {
    console.error('\x1b[31m', e.toString());
    console.error('\x1b[31m', e.message);
    console.error('\x1b[31m', e.stack);
    process.exit(1);
});
@ -0,0 +1,53 @@
#!/usr/bin/env node

import { initializeLogger } from '@azure-tools/sdk-generation-lib';
import { spawn } from 'child_process';
import * as path from 'path';
import { Logger } from 'winston';

import { DockerCliInput, dockerCliInput } from './schema/dockerCliInput';
import { DockerMockHostInput, dockerMockHostInput } from './schema/mockHostCliInput';

export type DockerMockHostContext = {
    readmeMdPath?: string;
    specRepo?: string;
    mockHostPath: string;
    logger: Logger;
}

export function initializeDockerMockHostContext(inputParams: DockerMockHostInput & DockerCliInput) {
    const dockerMockHostConfigProperties = dockerMockHostInput.getProperties();
    const dockerMockHostContext: DockerMockHostContext = {
        readmeMdPath: inputParams.readmeMdPath,
        specRepo: inputParams.specRepo,
        mockHostPath: dockerMockHostConfigProperties.mockHostPath,
        logger: initializeLogger(path.join(inputParams.resultOutputFolder, dockerMockHostConfigProperties.mockHostLogger), 'mock-host', false)
    };
    return dockerMockHostContext;
}

export function runMockHost() {
    const inputParams: DockerMockHostInput & DockerCliInput = {
        ...dockerCliInput.getProperties(),
        ...dockerMockHostInput.getProperties()
    };
    const context = initializeDockerMockHostContext(inputParams);
    if (!context.readmeMdPath) {
        context.logger.log('cmdout', `Cannot get valid readme, so do not start mock server.`);
        return;
    }
    const swaggerJsonFilePattern = context.readmeMdPath.replace(/readme[.a-z-]*\.md/gi, '**/*.json');
    const child = spawn(`node`, [`node_modules/@azure-tools/mock-service-host/dist/src/main.js`], {
        cwd: context.mockHostPath,
        env: {
            ...process.env,
            'specRetrievalMethod': 'filesystem',
            'specRetrievalLocalRelativePath': context.specRepo,
            'validationPathsPattern': swaggerJsonFilePattern
        }
    });
    child.stdout.on('data', (data) => context.logger.log('cmdout', data.toString()));
    child.stderr.on('data', (data) => context.logger.log('cmderr', data.toString()));
}

runMockHost();
@ -0,0 +1,74 @@
import convict from 'convict';
import * as dotenv from 'dotenv';

dotenv.config();

export class DockerCliInput {
    readmeMdPath: string;
    tag: string;
    sdkList: string;
    specRepo: string;
    workDir: string;
    sdkRepo: string;
    resultOutputFolder: string;
    dockerLogger: string;
}

export const dockerCliInput = convict<DockerCliInput>({
    readmeMdPath: {
        default: '',
        env: 'README_MD_PATH',
        arg: 'readme',
        format: String,
        doc: 'The relative path to readme.md from the root of the spec repo'
    },
    tag: {
        default: '',
        env: 'TAG',
        arg: 'tag',
        format: String,
        doc: 'The tag used to generate code. If not defined, the default tag will be used'
    },
    sdkList: {
        default: '',
        env: 'SDK',
        arg: 'sdk',
        format: String,
        doc: 'The languages of the SDKs to generate. Multiple languages can be specified, separated by commas'
    },
    specRepo: {
        default: '/spec-repo',
        env: 'SPEC_REPO',
        arg: 'spec-repo',
        format: String,
        doc: 'The absolute path of the mounted spec repo'
    },
    workDir: {
        default: '/work-dir',
        env: 'WORK_DIR',
        arg: 'work-dir',
        format: String,
        doc: 'The absolute path of the work directory, which contains all sdk repos'
    },
    sdkRepo: {
        default: '/sdk-repo',
        env: 'SDK_REPO',
        arg: 'sdk-repo',
        format: String,
        doc: 'The absolute path of the sdk repo'
    },
    resultOutputFolder: {
        default: '/tmp/output',
        env: 'RESULT_OUTPUT_FOLDER',
        arg: 'result-output-folder',
        format: String,
        doc: 'The absolute path of the output folder, which stores the results of the task engine'
    },
    dockerLogger: {
        default: 'docker.log',
        env: 'DOCKER_LOGGER',
        arg: 'docker-logger',
        format: String,
        doc: 'The path of docker.log. It will be concatenated with resultOutputFolder'
    }
});
@ -0,0 +1,128 @@
import convict from 'convict';
import * as dotenv from 'dotenv';

dotenv.config();

export class DockerTaskEngineInput {
    configFilePath: string;
    initOutputJsonFile: string;
    generateAndBuildInputJsonFile: string;
    generateAndBuildOutputJsonFile: string;
    mockTestInputJsonFile: string;
    mockTestOutputJsonFile: string;
    headSha: string | undefined;
    headRef: string | undefined;
    repoHttpsUrl: string;
    serviceType: string;
    mockServerHost?: string;
    initTaskLog: string;
    generateAndBuildTaskLog: string;
    mockTestTaskLog: string;
    taskResultJson: string;
    changeOwner: boolean;
}

export const dockerTaskEngineInput = convict<DockerTaskEngineInput>({
    configFilePath: {
        default: 'eng/codegen_to_sdk_config.json',
        env: 'CONFIG_FILE_PATH',
        arg: 'configFilePath',
        format: String,
        doc: 'The relative path to codegen_to_sdk_config.json'
    },
    initOutputJsonFile: {
        default: 'initOutput.json',
        env: 'INIT_OUTPUT_JSON_FILE',
        arg: 'initOutputJsonFile',
        format: String,
        doc: 'The relative path to initOutput.json. It will be concatenated with resultOutputFolder'
    },
    generateAndBuildInputJsonFile: {
        default: 'generateAndBuildInput.json',
        env: 'GENERATE_AND_BUILD_INPUT_JSON_FILE',
        arg: 'generateAndBuildInputJsonFile',
        format: String,
        doc: 'The relative path to generateAndBuildInput.json. It will be concatenated with resultOutputFolder'
    },
    generateAndBuildOutputJsonFile: {
        default: 'generateAndBuildOutputJson.json',
        env: 'GENERATE_AND_BUILD_OUTPUT_JSON_FILE',
        arg: 'generateAndBuildOutputJsonFile',
        format: String,
        doc: 'The relative path to generateAndBuildOutput.json. It will be concatenated with resultOutputFolder'
    },
    mockTestInputJsonFile: {
        default: 'mockTestInput.json',
        env: 'MOCK_TEST_INPUT_JSON_FILE',
        arg: 'mockTestInputJsonFile',
        format: String,
        doc: 'The relative path to mockTestInput.json. It will be concatenated with resultOutputFolder'
    },
    mockTestOutputJsonFile: {
        default: 'mockTestOutput.json',
        env: 'MOCK_TEST_OUTPUT_JSON_FILE',
        arg: 'mockTestOutputJsonFile',
        format: String,
        doc: 'The relative path to mockTestOutput.json. It will be concatenated with resultOutputFolder'
    },
    headSha: {
        default: undefined,
        env: 'HEAD_SHA',
        format: String,
        doc: 'The headSha of the spec repo'
    },
    headRef: {
        default: undefined,
        env: 'HEAD_REF',
        format: String,
        doc: 'The headRef of the spec repo'
    },
    repoHttpsUrl: {
        default: 'https://github.com/Azure/azure-rest-api-specs',
        env: 'REPO_HTTP_URL',
        format: String,
        doc: 'The https url of the spec repo'
    },
    serviceType: {
        default: 'resource-manager',
        env: 'SERVICE_TYPE',
        format: String,
        doc: 'resource-manager or data-plane'
    },
    mockServerHost: {
        default: 'https://localhost:8443',
        env: 'MOCK_SERVER_HOST',
        format: String,
        doc: 'The host of the mock server'
    },
    initTaskLog: {
        default: 'init-task.log',
        env: 'INIT_TASK_LOG',
        format: String,
        doc: 'The relative path to init-task.log. It will be concatenated with resultOutputFolder'
    },
    generateAndBuildTaskLog: {
        default: 'generateAndBuild-task.log',
        env: 'GENERATE_AND_BUILD_TASK_LOG',
        format: String,
        doc: 'The relative path to generateAndBuild-task.log. It will be concatenated with resultOutputFolder'
    },
    mockTestTaskLog: {
        default: 'mockTest-task.log',
        env: 'MOCK_TEST_TASK_LOG',
        format: String,
        doc: 'The relative path to mockTest-task.log. It will be concatenated with resultOutputFolder'
    },
    taskResultJson: {
        default: 'taskResults.json',
        env: 'TASK_RESULT_JSON',
        format: String,
        doc: 'The relative path to taskResults.json. It will be concatenated with resultOutputFolder'
    },
    changeOwner: {
        default: true,
        env: 'CHANGE_OWNER',
        format: Boolean,
        doc: 'When the commands run in docker, the owner of the sdk repo must be changed because the generated code is owned by root'
    }
});
@ -0,0 +1,26 @@
import convict from 'convict';
import * as dotenv from 'dotenv';

dotenv.config();

export class DockerMockHostInput {
    mockHostLogger: string;
    mockHostPath: string;
}

export const dockerMockHostInput = convict<DockerMockHostInput>({
    mockHostLogger: {
        default: 'mock-host.log',
        env: 'MOCK_HOST_LOGGER',
        arg: 'mock-host-logger',
        format: String,
        doc: 'The path of mock-host.log. It will be concatenated with resultOutputFolder'
    },
    mockHostPath: {
        default: '/mock-host',
        env: 'MOCK_HOST_PATH',
        arg: 'mock-host-path',
        format: String,
        doc: 'The path of the mock host'
    }
});
@ -1,17 +1,18 @@
#!/usr/bin/env node
import * as fs from 'fs';
import {
    AzureSDKTaskName,
    createTaskResult,
    LogFilter,
    logger,
    requireJsonc,
    TaskResult,
    TaskResultStatus,
    TaskOutput,
    TaskResult,
    TaskResultStatus
} from '@azure-tools/sdk-generation-lib';
import * as fs from 'fs';

import { GenerateResultCliInput, generateResultCliInput } from '../../cliSchema/generateResultCliConfig';

import { generateResultCliInput, GenerateResultCliInput } from './cliSchema/generateResultCliConfig';

generateResultCliInput.validate();
const config: GenerateResultCliInput = generateResultCliInput.getProperties();
@ -60,7 +61,7 @@ async function main() {
    );

    fs.writeFileSync(config.resultOutputPath, JSON.stringify(taskResult, null, 2), {
        encoding: 'utf-8',
        encoding: 'utf-8'
    });
    console.log('Generate Success !!!');
@ -1,17 +1,17 @@
#!/usr/bin/env node
import * as fs from 'fs';
import * as path from 'path';

import { prepareArtifactFilesInput, PrepareArtifactFilesInput } from './cliSchema/prepareArtifactFilesCliConfig';
import {
    GenerateAndBuildOutput,
    getGenerateAndBuildOutput,
    logger,
    requireJsonc,
    SDK,
    TaskResultStatus,
    TaskResultStatus
} from '@azure-tools/sdk-generation-lib';
import { getFileListInPackageFolder } from './utils/git';
import * as fs from 'fs';
import * as path from 'path';

import { PrepareArtifactFilesInput, prepareArtifactFilesInput } from '../../cliSchema/prepareArtifactFilesCliConfig';
import { getFileListInPackageFolder } from '../../utils/git';

function copyFile(filePath: string, targetDir: string) {
    const fileDir = path.dirname(filePath);
@ -26,7 +26,7 @@ async function prepareSourceCode(
) {
    for (const p of generateAndBuildOutput.packages) {
        const result = p.result;
        if (result === TaskResultStatus.failure) {
        if (result === TaskResultStatus.Failure) {
            logger.warn(`Build ${p.packageName} failed, skipped it`);
            continue;
        }
@ -53,7 +53,7 @@ async function prepareSourceCode(
async function prepareArtifacts(generateAndBuildOutput: GenerateAndBuildOutput, language: string, artifactDir: string) {
    for (const p of generateAndBuildOutput.packages) {
        const result = p.result;
        if (result === TaskResultStatus.failure) {
        if (result === TaskResultStatus.Failure) {
            logger.warn(`Build ${p.packageName} failed, skipped it`);
            continue;
        }
@ -1,5 +1,4 @@
#!/usr/bin/env node
import * as fs from 'fs';
import {
    AzureSDKTaskName,
    BlobBasicContext,
@ -13,24 +12,25 @@ import {
    QueuedEvent,
    requireJsonc,
    ResultBlobPublisher,
    ResultEventhubPublisher,
    ResultDBPublisher,
    ResultEventhubPublisher,
    SDKPipelineStatus,
    StorageType,
    TaskResult,
    Trigger,
    Trigger
} from '@azure-tools/sdk-generation-lib';
import * as fs from 'fs';

import {
    resultPublisherBlobInput,
    ResultPublisherBlobInput,
    resultPublisherDBCodeGenerationInput,
    resultPublisherBlobInput,
    ResultPublisherDBCodeGenerationInput,
    resultPublisherDBResultInput,
    resultPublisherDBCodeGenerationInput,
    ResultPublisherDBResultInput,
    resultPublisherEventHubInput,
    resultPublisherDBResultInput,
    ResultPublisherEventHubInput,
} from './cliSchema/publishResultConfig';
    resultPublisherEventHubInput
} from '../../cliSchema/publishResultConfig';

async function publishBlob() {
    resultPublisherBlobInput.validate();
@ -39,7 +39,7 @@ async function publishBlob() {
        pipelineBuildId: config.pipelineBuildId,
        sdkGenerationName: config.sdkGenerationName,
        azureStorageBlobSasUrl: config.azureStorageBlobSasUrl,
        azureBlobContainerName: config.azureBlobContainerName,
        azureBlobContainerName: config.azureBlobContainerName
    };
    const resultBlobPublisher: ResultBlobPublisher = new ResultBlobPublisher(context);
    await resultBlobPublisher.uploadLogsAndResult(config.logsAndResultPath, config.taskName as AzureSDKTaskName);
@ -75,7 +75,7 @@ function initMongoConnectContext(config: ResultPublisherDBCodeGenerationInput):
        database: config.mongodb.database,
        ssl: config.mongodb.ssl,
        synchronize: true,
        logging: true,
        logging: true
    };

    return mongoConnectContext;
@ -144,37 +144,37 @@ async function publishEventhub(pipelineStatus: SDKPipelineStatus) {
    const publisher: ResultEventhubPublisher = new ResultEventhubPublisher(config.eventHubConnectionString);

    switch (pipelineStatus) {
        case 'queued':
            event = {
                status: 'queued',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId,
            } as QueuedEvent;
            break;
        case 'in_progress':
            event = {
                status: 'in_progress',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId,
            } as InProgressEvent;
            break;
        case 'completed':
            if (!config.resultsPath || !config.logPath) {
                throw new Error(`Invalid completed event parameter!`);
            }
        case 'queued':
            event = {
                status: 'queued',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId
            } as QueuedEvent;
            break;
        case 'in_progress':
            event = {
                status: 'in_progress',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId
            } as InProgressEvent;
            break;
        case 'completed':
            if (!config.resultsPath || !config.logPath) {
                throw new Error(`Invalid completed event parameter!`);
            }

            const taskResults: TaskResult[] = getTaskResults(config.resultsPath);
            const taskTotalResult: TaskResult = generateTotalResult(taskResults, config.pipelineBuildId);
            event = {
                status: 'completed',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId,
                logPath: config.logPath,
                result: taskTotalResult,
            } as CompletedEvent;
            break;
        default:
            throw new Error(`Unsupported status: ` + (pipelineStatus as string));
            const taskResults: TaskResult[] = getTaskResults(config.resultsPath);
            const taskTotalResult: TaskResult = generateTotalResult(taskResults, config.pipelineBuildId);
            event = {
                status: 'completed',
                trigger: trigger,
                pipelineBuildId: config.pipelineBuildId,
                logPath: config.logPath,
                result: taskTotalResult
            } as CompletedEvent;
            break;
        default:
            throw new Error(`Unsupported status: ` + (pipelineStatus as string));
    }
    await publisher.publishEvent(event);
    await publisher.close();
@ -186,17 +186,17 @@ async function main() {
    const pipelineStatus = args['pipelineStatus'];

    switch (storageType as StorageType) {
        case StorageType.Blob:
            await publishBlob();
            break;
        case StorageType.Db:
            await publishDB(pipelineStatus);
            break;
        case StorageType.EventHub:
            await publishEventhub(pipelineStatus);
            break;
        default:
            throw new Error(`Unknown storageType:${storageType}!`);
        case StorageType.Blob:
            await publishBlob();
            break;
        case StorageType.Db:
            await publishDB(pipelineStatus);
            break;
        case StorageType.EventHub:
            await publishEventhub(pipelineStatus);
            break;
        default:
            throw new Error(`Unknown storageType:${storageType}!`);
    }
}
@ -1,15 +1,15 @@
#!/usr/bin/env node
import * as fs from 'fs';
import {
    ArtifactBlobUploader,
    ArtifactBlobUploaderContext,
    GenerateAndBuildOutput,
    getGenerateAndBuildOutput,
    logger,
    requireJsonc,
    requireJsonc
} from '@azure-tools/sdk-generation-lib';
import * as fs from 'fs';

import { uploadBlobInput, UploadBlobInput } from './cliSchema/uploadArtifactConfig';
import { UploadBlobInput, uploadBlobInput } from '../../cliSchema/uploadArtifactConfig';

async function main() {
    uploadBlobInput.validate();
@ -22,7 +22,7 @@ async function main() {
        azureStorageBlobSasUrl: config.azureStorageBlobSasUrl,
        azureBlobContainerName: config.azureBlobContainerName,
        language: config.language,
        pipelineBuildId: config.pipelineBuildId,
        pipelineBuildId: config.pipelineBuildId
    };
    const artifactBlobUploader: ArtifactBlobUploader = new ArtifactBlobUploader(blobContext);
    const generateAndBuildOutputJson: GenerateAndBuildOutput = getGenerateAndBuildOutput(
@ -17,45 +17,45 @@ export const generateResultCliInput = convict<GenerateResultCliInput>({
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    logfile: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'logfile',
        arg: 'logfile'
    },
    logFilterStr: {
        default: null,
        nullable: true,
        format: String,
        arg: 'logFilterStr',
        arg: 'logFilterStr'
    },
    taskName: {
        default: null,
        format: ['init', 'generateAndBuild', 'mockTest', 'liveTest'],
        arg: 'taskName',
        arg: 'taskName'
    },
    exeResult: {
        default: null,
        nullable: true,
        format: ['success', 'failure'],
        arg: 'exeResult',
        arg: 'exeResult'
    },
    taskOutputPath: {
        default: null,
        nullable: true,
        format: String,
        arg: 'taskOutputPath',
        arg: 'taskOutputPath'
    },
    resultOutputPath: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'resultOutputPath',
        arg: 'resultOutputPath'
    },
    dockerResultFile: {
        default: null,
        nullable: true,
        format: String,
        arg: 'dockerResultFile',
    },
        arg: 'dockerResultFile'
    }
});
@ -12,17 +12,17 @@ export const prepareArtifactFilesInput = convict<PrepareArtifactFilesInput>({
    generateAndBuildOutputFile: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'generateAndBuildOutputFile',
        arg: 'generateAndBuildOutputFile'
    },
    artifactDir: {
        doc: 'The dir to publish artifact',
        default: null,
        format: assertNullOrEmpty,
        arg: 'artifactDir',
        arg: 'artifactDir'
    },
    language: {
        default: null,
        format: ['js', 'python', 'go', 'net', 'java'],
        arg: 'language',
    },
        arg: 'language'
    }
});
@ -1,7 +1,7 @@
#!/usr/bin/env node
import { ServiceType } from '@azure-tools/sdk-generation-lib';
import convict from 'convict';

import { ServiceType } from '@azure-tools/sdk-generation-lib';
import { assertNullOrEmpty } from '../utils/validator';

export class ResultPublisherBlobInput {
@ -17,33 +17,33 @@ export const resultPublisherBlobInput = convict<ResultPublisherBlobInput>({
    logsAndResultPath: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'logsAndResultPath',
        arg: 'logsAndResultPath'
    },
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    taskName: {
        default: null,
        format: String,
        arg: 'taskName',
        arg: 'taskName'
    },
    sdkGenerationName: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'sdkGenerationName',
        arg: 'sdkGenerationName'
    },
    azureStorageBlobSasUrl: {
        default: null,
        env: 'AZURE_STORAGE_BLOB_SAS_URL',
        format: assertNullOrEmpty,
        format: assertNullOrEmpty
    },
    azureBlobContainerName: {
        default: 'sdk-generation',
        env: 'AZURE_BLOB_CONTAINER_NAME',
        format: assertNullOrEmpty,
    },
        format: assertNullOrEmpty
    }
});

export class ResultPublisherDBCodeGenerationInput {
@ -75,102 +75,102 @@ export const resultPublisherDBCodeGenerationInput = convict<ResultPublisherDBCod
            doc: 'The host used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_HOST',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        port: {
            doc: 'The port used to connect db',
            default: 10225,
            env: 'SDKGENERATION_MONGODB_PORT',
            format: Number,
            format: Number
        },
        database: {
            doc: 'The database used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_DATABASE',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        username: {
            doc: 'The username used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_USERNAME',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        password: {
            doc: 'The password used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_PASSWORD',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        ssl: {
            doc: 'Whether used ssl to connect db',
            default: true,
            env: 'SDKGENERATION_MONGODB_SSL',
            format: Boolean,
        },
            format: Boolean
        }
    },
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    sdkGenerationName: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'sdkGenerationName',
        arg: 'sdkGenerationName'
    },
    service: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'service',
        arg: 'service'
    },
    serviceType: {
        default: null,
        format: ['data-plane', 'resource-manager'],
        arg: 'serviceType',
        arg: 'serviceType'
    },
    language: {
        default: null,
        format: ['js', 'python', 'go', 'net', 'java'],
        arg: 'language',
        arg: 'language'
    },
    swaggerRepo: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'swaggerRepo',
        arg: 'swaggerRepo'
    },
    sdkRepo: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'sdkRepo',
        arg: 'sdkRepo'
    },
    codegenRepo: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'codegenRepo',
        arg: 'codegenRepo'
    },
    triggerType: {
        default: null,
        format: ['ad-hoc', 'ci', 'release'],
        arg: 'triggerType',
        arg: 'triggerType'
    },
    tag: {
        default: null,
        nullable: true,
        format: String,
        arg: 'tag',
        arg: 'tag'
    },
    owner: {
        default: null,
        nullable: true,
        format: String,
        arg: 'owner',
        arg: 'owner'
    },
    codePR: {
        default: null,
        nullable: true,
        format: String,
        arg: 'codePR',
    },
        arg: 'codePR'
    }
});

export class ResultPublisherDBResultInput {
@ -192,49 +192,49 @@ export const resultPublisherDBResultInput = convict<ResultPublisherDBResultInput
            doc: 'The host used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_HOST',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        port: {
            doc: 'The port used to connect db',
            default: 10225,
            env: 'SDKGENERATION_MONGODB_PORT',
            format: Number,
            format: Number
        },
        database: {
            doc: 'The database used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_DATABASE',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        username: {
            doc: 'The username used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_USERNAME',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        password: {
            doc: 'The password used to connect db',
            default: null,
            env: 'SDKGENERATION_MONGODB_PASSWORD',
            format: assertNullOrEmpty,
            format: assertNullOrEmpty
        },
        ssl: {
            doc: 'Whether used ssl to connect db',
            default: true,
            env: 'SDKGENERATION_MONGODB_SSL',
            format: Boolean,
        },
            format: Boolean
        }
    },
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    taskResultsPath: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'taskResultsPath',
    },
        arg: 'taskResultsPath'
    }
});

export class ResultPublisherEventHubInput {
@ -250,35 +250,35 @@ export const resultPublisherEventHubInput = convict<ResultPublisherEventHubInput
    eventHubConnectionString: {
        default: null,
        env: 'EVENTHUB_SAS_URL',
        format: assertNullOrEmpty,
        format: assertNullOrEmpty
    },
    partitionKey: {
        default: null,
        env: 'PARTITIONKEY',
        nullable: true,
        format: String,
        format: String
    },
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    trigger: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'trigger',
        arg: 'trigger'
    },
    logPath: {
        default: null,
        nullable: true,
        format: String,
        arg: 'logPath',
        arg: 'logPath'
    },
    resultsPath: {
        doc: 'task result files array',
        default: null,
        nullable: true,
        format: String,
        arg: 'resultsPath',
    },
        arg: 'resultsPath'
    }
});
@ -14,26 +14,26 @@ export const uploadBlobInput = convict<UploadBlobInput>({
    generateAndBuildOutputFile: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'generateAndBuildOutputFile',
        arg: 'generateAndBuildOutputFile'
    },
    pipelineBuildId: {
        default: null,
        format: assertNullOrEmpty,
        arg: 'buildId',
        arg: 'buildId'
    },
    language: {
        default: null,
        format: ['js', 'python', 'go', 'net', 'java'],
        arg: 'language',
        arg: 'language'
    },
    azureStorageBlobSasUrl: {
        default: null,
        env: 'AZURE_STORAGE_BLOB_SAS_URL',
        format: assertNullOrEmpty,
        format: assertNullOrEmpty
    },
    azureBlobContainerName: {
        default: 'sdk-generation',
        env: 'AZURE_BLOB_CONTAINER_NAME',
        format: assertNullOrEmpty,
    },
        format: assertNullOrEmpty
    }
});
@ -1,8 +1,8 @@
import {TaskResult} from "@azure-tools/sdk-generation-lib/dist/types/taskResult";
import * as https from "https";
import * as fs from "fs";
import { TaskResult } from '@azure-tools/sdk-generation-lib/dist/types/taskResult';
import * as fs from 'fs';
import * as https from 'https';

const axios = require('axios')
const axios = require('axios');

export class SdkGenerationServerClient {
    host: string;
@ -22,8 +22,8 @@ export class SdkGenerationServerClient {
        }, {
            httpsAgent: new https.Agent({
                cert: this.cert,
                key: this.key,
                key: this.key
            })
        })
        });
    }
}
@ -1,10 +1,58 @@
import * as child_process from 'child_process';
import { execSync, spawn } from 'child_process';
import * as os from 'os';
import { Logger } from 'winston';

export function getFileListInPackageFolder(packageFolder: string) {
    const files = child_process
        .execSync('git ls-files -cmo --exclude-standard', { encoding: 'utf8', cwd: packageFolder })
    const files = execSync('git ls-files -cmo --exclude-standard', { encoding: 'utf8', cwd: packageFolder })
        .trim()
        .split('\n');

    return files;
}

export function getHeadSha(specRepo: string) {
    const headSha = execSync(`git rev-parse HEAD`, { encoding: 'utf8', cwd: specRepo });
    return headSha.trim();
}

export function getHeadRef(specRepo: string) {
    const headRef = execSync(`git rev-parse --abbrev-ref HEAD`, { encoding: 'utf8', cwd: specRepo });
    return headRef.trim();
}

export function safeDirectory(sdkRepo: string) {
    execSync(`git config --global --add safe.directory ${sdkRepo}`, { encoding: 'utf8', cwd: sdkRepo });
}

export function disableFileMode(sdkRepo: string) {
    execSync(`git config core.fileMode false --replace-all`, { encoding: 'utf8', cwd: sdkRepo });
}

export async function getChangedPackageDirectory(repo: string): Promise<Set<string>> {
    const changedPackageDirectories: Set<string> = new Set<string>();
    const gitLsFiles = execSync(`git ls-files -mdo --exclude-standard`, { encoding: 'utf8', cwd: repo });
    const files = gitLsFiles.split(os.EOL);
    for (const filePath of files) {
        if (filePath.match(/sdk\/[^\/0-9]*\/.*/)) {
            const packageDirectory = /sdk\/[^\/0-9]*\/[^\/]*/.exec(filePath);
            if (packageDirectory) {
                changedPackageDirectories.add(packageDirectory[0]);
            }
        }
    }
    return changedPackageDirectories;
}

export async function cloneRepo(githubRepo: string, cwd: string, logger: Logger) {
    const child = spawn(`git`, [`clone`, `https://github.com/Azure/${githubRepo}.git`], {
        cwd: cwd,
        stdio: ['ignore', 'pipe', 'pipe']
    });
    child.stdout.on('data', (data) => logger.log('cmdout', data.toString()));
    child.stderr.on('data', (data) => logger.log('cmderr', data.toString()));
    await new Promise((resolve) => {
        child.on('exit', (code, signal) => {
            resolve({ code, signal });
        });
    });
}
@ -0,0 +1,101 @@
import { execSync } from 'child_process';
import commandLineArgs from 'command-line-args';
import { existsSync, mkdirSync } from 'fs';
import * as path from 'path';
import * as process from 'process';

const repoCommitId = {
    'azure-rest-api-specs': '0baca05c851c1749e92beb0d2134cd958827dd54',
    'azure-sdk-for-js': '67946c5b0ce135f58ecfeab1443e5be52604908e',
    'azure-sdk-for-java': '307df24267304fbf3947025bef7eaf9698410de8',
    'azure-sdk-for-python': '53f66170cc47739204cedfe0a46989290c047c98',
    'azure-sdk-for-go': '241bdb849ce431e1a5e398a5649cde93149ee374',
    'azure-sdk-for-net': 'e9db0733a642d50c34101339f74fdc487599d824'
};

const defaultImageName = 'sdkgeneration.azurecr.io/sdk-generation:v1.0';
const integrationBranch = 'sdkgeneration-integration-test';

async function prepareRepo(currentPath: string, repoName: string) {
    const tmpFolder = path.join(currentPath, 'tmp');
    if (!existsSync(tmpFolder)) {
        mkdirSync(tmpFolder);
    }

    if (!existsSync(path.join(tmpFolder, repoName))) {
        execSync(`git clone https://github.com/Azure/${repoName}.git`, {
            cwd: tmpFolder,
            stdio: 'inherit'
        });
    }
    execSync(`git restore --staged . && git restore . && git checkout . && git clean -fd`, {
        cwd: path.join(tmpFolder, repoName),
        stdio: 'inherit'
    });

    if (!!repoCommitId[repoName] && execSync(`git rev-parse HEAD`, {
        encoding: 'utf-8',
        cwd: path.join(tmpFolder, repoName)
    }).trim() !== repoCommitId[repoName]) {
        execSync(`git checkout ${repoCommitId[repoName]}`, {
            cwd: path.join(tmpFolder, repoName),
            stdio: 'inherit'
        });
    }

    if (execSync(`git rev-parse --abbrev-ref HEAD`, {
        encoding: 'utf-8',
        cwd: path.join(tmpFolder, repoName)
    }).trim() !== integrationBranch) {
        execSync(`git switch -c ${integrationBranch}`, {
            cwd: path.join(tmpFolder, repoName),
            stdio: 'inherit'
        });
    }
}

async function runDocker(currentPath: string, sdkRepoName: string, dockerImage: string) {
    const tmpFolder = path.join(currentPath, 'tmp');
    // eslint-disable-next-line max-len
    execSync(`docker run -v ${path.join(tmpFolder, 'azure-rest-api-specs')}:/spec-repo -v ${path.join(tmpFolder, sdkRepoName)}:/sdk-repo ${dockerImage} --readme=specification/agrifood/resource-manager/readme.md`, {
        stdio: 'inherit'
    });
}

async function buildDockerImage(rushCwd: string, dockerCwd: string) {
    execSync(`rushx pack`, {
        cwd: rushCwd,
        stdio: 'inherit'
    });
    execSync(`docker build -t ${defaultImageName} .`, {
        cwd: dockerCwd,
        stdio: 'inherit'
    });
}

export async function main(options: any) {
    const currentPath = path.resolve(__dirname);
    if (!options['docker-image']) {
        await buildDockerImage(path.join(currentPath, '..', '..'), path.join(currentPath, '..', '..', '..', '..'));
        options['docker-image'] = defaultImageName;
    }
    if (!options['sdk-repo']) {
        options['sdk-repo'] = Object.keys(repoCommitId).filter((ele) => ele !== 'azure-rest-api-specs').join(',');
    }
    await prepareRepo(currentPath, 'azure-rest-api-specs');
    for (const sdkRepo of options['sdk-repo'].split(',')) {
        await prepareRepo(currentPath, sdkRepo);
        await runDocker(currentPath, sdkRepo, options['docker-image']);
    }
}

const optionDefinitions = [
    { name: 'docker-image', type: String },
    { name: 'sdk-repo', type: String }
];
const options = commandLineArgs(optionDefinitions);

main(options).catch((err) => {
    console.log(err);
    process.exit(1);
});
@ -0,0 +1,84 @@
import { initializeLogger } from '@azure-tools/sdk-generation-lib';
import { existsSync } from 'fs';
import * as path from 'path';

import { DockerContext } from '../../src/cli/dockerCli/core/DockerContext';
import { DockerTaskEngineContext } from '../../src/cli/dockerCli/core/DockerTaskEngineContext';
import { SDKGenerationTaskBase } from '../../src/cli/dockerCli/core/tasks/SDKGenerationTaskBase';

describe('task engine', () => {
    it('should initialize a DockerTaskEngineContext by DockerContext', async () => {
        const dockerContext = new DockerContext();
        const tmpFolder = path.join(path.resolve('.'), 'test', 'unit', 'tmp');
        dockerContext.initialize({
            readmeMdPath: 'specification/agrifood/resource-manager/readme.md',
            tag: '',
            sdkList: '',
            specRepo: path.join(tmpFolder, 'spec-repo'),
            workDir: '/work-dir',
            sdkRepo: path.join(tmpFolder, 'sdk-repo'),
            resultOutputFolder: path.join(tmpFolder, 'output'),
            dockerLogger: 'docker.log'
        });
        const dockerTaskEngineContext = new DockerTaskEngineContext();
        dockerTaskEngineContext.initialize(dockerContext);
        expect(dockerTaskEngineContext.configFilePath).toBe('eng/codegen_to_sdk_config.json');
        expect(dockerTaskEngineContext.initOutputJsonFile).toBe(path.join(tmpFolder, 'output', 'initOutput.json'));
        expect(dockerTaskEngineContext.generateAndBuildInputJsonFile).toBe(path.join(tmpFolder, 'output', 'generateAndBuildInput.json'));
        expect(dockerTaskEngineContext.generateAndBuildOutputJsonFile).toBe(path.join(tmpFolder, 'output', 'generateAndBuildOutputJson.json'));
        expect(dockerTaskEngineContext.mockTestInputJsonFile).toBe(path.join(tmpFolder, 'output', 'mockTestInput.json'));
        expect(dockerTaskEngineContext.mockTestOutputJsonFile).toBe(path.join(tmpFolder, 'output', 'mockTestOutput.json'));
        expect(dockerTaskEngineContext.initTaskLog).toBe(path.join(tmpFolder, 'output', 'init-task.log'));
        expect(dockerTaskEngineContext.generateAndBuildTaskLog).toBe(path.join(tmpFolder, 'output', 'generateAndBuild-task.log'));
        expect(dockerTaskEngineContext.mockTestTaskLog).toBe(path.join(tmpFolder, 'output', 'mockTest-task.log'));
        expect(dockerTaskEngineContext.readmeMdPath).toBe('specification/agrifood/resource-manager/readme.md');
    });

    it('should get task list', async () => {
        const tmpFolder = path.join(path.resolve('.'), 'test', 'unit', 'tmp');
        const dockerTaskEngineContext = new DockerTaskEngineContext();
        dockerTaskEngineContext.sdkRepo = path.join(tmpFolder, 'sdk-repo');
        dockerTaskEngineContext.configFilePath = 'eng/codegen_to_sdk_config.json';
        dockerTaskEngineContext.logger = initializeLogger(path.join(tmpFolder, 'docker.log'), 'docker', true);
        const tasksToRun: SDKGenerationTaskBase[] = await dockerTaskEngineContext.getTaskToRun();
        expect(tasksToRun.length).toEqual(2);
        expect(tasksToRun[0].taskType).toEqual('InitTask');
        expect(tasksToRun[1].taskType).toEqual('GenerateAndBuildTask');
    });

    it('should run tasks', async () => {
        jest.setTimeout(999999);
        const tmpFolder = path.join(path.resolve('.'), 'test', 'unit', 'tmp');
        const dockerTaskEngineContext = new DockerTaskEngineContext();

        dockerTaskEngineContext.sdkRepo = path.join(tmpFolder, 'sdk-repo');
        dockerTaskEngineContext.taskResultJsonPath = path.join(tmpFolder, 'output', 'taskResults.json');
        dockerTaskEngineContext.logger = initializeLogger(path.join(tmpFolder, 'docker.log'), 'docker', true);
        dockerTaskEngineContext.configFilePath = 'eng/codegen_to_sdk_config.json';
        dockerTaskEngineContext.initOutputJsonFile = path.join(tmpFolder, 'output', 'initOutput.json');
        dockerTaskEngineContext.generateAndBuildInputJsonFile = path.join(tmpFolder, 'output', 'generateAndBuildInput.json');
        dockerTaskEngineContext.generateAndBuildOutputJsonFile = path.join(tmpFolder, 'output', 'generateAndBuildOutputJson.json');
        dockerTaskEngineContext.mockTestInputJsonFile = path.join(tmpFolder, 'output', 'mockTestInput.json');
        dockerTaskEngineContext.mockTestOutputJsonFile = path.join(tmpFolder, 'output', 'mockTestOutput.json');
        dockerTaskEngineContext.initTaskLog = path.join(tmpFolder, 'output', 'init-task.log');
        dockerTaskEngineContext.generateAndBuildTaskLog = path.join(tmpFolder, 'output', 'generateAndBuild-task.log');
        dockerTaskEngineContext.mockTestTaskLog = path.join(tmpFolder, 'output', 'mockTest-task.log');
        dockerTaskEngineContext.readmeMdPath = 'specification/agrifood/resource-manager/readme.md';
        dockerTaskEngineContext.specRepo = {
            repoPath: path.join(tmpFolder, 'spec-repo'),
            headSha: '11111',
            headRef: '11111',
            repoHttpsUrl: 'https://github.com/Azure/azure-rest-api-specs'
        };
        dockerTaskEngineContext.changeOwner = false;

        await dockerTaskEngineContext.runTaskEngine();
        expect(existsSync(dockerTaskEngineContext.initTaskLog)).toBe(true);
        expect(existsSync(dockerTaskEngineContext.generateAndBuildInputJsonFile)).toBe(true);
        expect(existsSync(dockerTaskEngineContext.generateAndBuildOutputJsonFile)).toBe(true);
        expect(existsSync(dockerTaskEngineContext.generateAndBuildTaskLog)).toBe(true);
        expect(existsSync(dockerTaskEngineContext.taskResultJsonPath)).toBe(true);
    });
});
@ -0,0 +1,57 @@
import { execSync } from 'child_process';
import { existsSync, mkdirSync } from 'fs';
import * as path from 'path';

function mkdirTmpFolderIfNotExist(tmpFolder: string) {
    if (!existsSync(tmpFolder)) {
        mkdirSync(tmpFolder);
    }
}

function cloneSpecRepoIfNotExist(tmpFolder: string) {
    if (!existsSync(path.join(tmpFolder, 'spec-repo'))) {
        execSync(`git clone https://github.com/Azure/azure-rest-api-specs.git spec-repo`, {
            cwd: tmpFolder,
            stdio: 'inherit'
        });
    }
    execSync(`git checkout 0baca05c851c1749e92beb0d2134cd958827dd54`, {
        cwd: path.join(tmpFolder, 'spec-repo'),
        stdio: 'inherit'
    });
}

function cloneSdkRepoIfNotExist(tmpFolder: string) {
    if (!existsSync(path.join(tmpFolder, 'sdk-repo'))) {
        execSync(`git clone https://github.com/Azure/azure-sdk-for-js.git sdk-repo`, {
            cwd: tmpFolder,
            stdio: 'inherit'
        });
    }
    execSync(`git checkout . && git clean -fd`, {
        cwd: path.join(tmpFolder, 'sdk-repo'),
        stdio: 'inherit'
    });
    execSync(`git checkout 67946c5b0ce135f58ecfeab1443e5be52604908e`, {
        cwd: path.join(tmpFolder, 'sdk-repo'),
        stdio: 'inherit'
    });
}

function mkdirResultOutputFolderIfNotExist(tmpFolder: string) {
    if (!existsSync(path.join(tmpFolder, 'output'))) {
        mkdirSync(path.join(tmpFolder, 'output'));
    }
}

async function main() {
    const tmpFolder = path.join(path.resolve(__dirname), '..', 'tmp');
    mkdirTmpFolderIfNotExist(tmpFolder);
    cloneSpecRepoIfNotExist(tmpFolder);
    cloneSdkRepoIfNotExist(tmpFolder);
    mkdirResultOutputFolderIfNotExist(tmpFolder);
}

main().catch((e) => {
    console.log(e);
});
@ -0,0 +1,37 @@
# @azure-tools/sdk-generation-lib

This package includes some basic functionalities and definitions used by the sdk generation pipeline.

## Install

```shell
npm i @azure-tools/sdk-generation-lib
```

# Functionalities

| Functionalities  | Description                                                                 |
|------------------|-----------------------------------------------------------------------------|
| runScript        | Runs any kind of script/command supported by the OS.                        |
| createTaskResult | Parses the logs produced by tasks and generates a summarized task result.   |
| executeTask      | The wrapper of `runScript` and `createTaskResult`.                          |
| logger           | The logger instance used by the sdk generation pipeline.                    |
| getTask          | Gets a task's configuration from the sdk repo's task configuration file.    |
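As a quick illustration, the docker task engine in this repository combines `getTask` and `runScript` roughly as follows. This is a minimal sketch based on how the tasks in this commit call the library: the option names `cwd` and `args` mirror the init task's invocation of `runScript`, the `'init'` task name comes from `eng/codegen_to_sdk_config.json`, and `runScript` resolves to a status string such as `'failed'`.

```typescript
import { getTask, InitOptions, runScript } from '@azure-tools/sdk-generation-lib';
import path from 'path';

// Minimal sketch: load the 'init' task from the sdk repo's config file and run its script.
async function runInitTask(sdkRepo: string, configFilePath: string, initOutputJsonFile: string) {
    const initTask = getTask(path.join(sdkRepo, configFilePath), 'init');
    if (!initTask) {
        throw new Error(`Cannot find the init task in ${configFilePath}.`);
    }
    const runOptions = (initTask as InitOptions).initScript;
    // The script receives the output json path as an argument and reports a status string.
    const executeResult = await runScript(runOptions, {
        cwd: path.resolve(sdkRepo),
        args: [initOutputJsonFile]
    });
    return executeResult;
}
```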
# Definitions

| Definitions             | Description                                             |
|-------------------------|---------------------------------------------------------|
| CodegenToSdkConfig      | The configuration type of `codegen_to_sdk_config.json`. |
| InitOptions             | The configuration type of the init task.                |
| GenerateAndBuildOptions | The configuration type of the generate and build task.  |
| MockTestOptions         | The configuration type of the mock test task.           |
| RunOptions              | The configuration type of running a script.             |
| LogFilter               | The configuration type of filtering logs.               |
| InitOutput              | The output type of the init task.                       |
| GenerateAndBuildInput   | The input type of the generate and build task.          |
| GenerateAndBuildOutput  | The output type of the generate and build task.         |
| MockTestInput           | The input type of the mock test task.                   |
| TestOutput              | The output type of the mock test task.                  |
| TaskResultStatus        | The task status.                                        |
| TaskResult              | The details of a task result.                           |
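To show how these types relate, here is a hedged sketch of the `codegen_to_sdk_config.json` an sdk repo could provide. The task names (`init`, `generateAndBuild`, `mockTest`) and the `initScript`/`generateAndBuildScript`/`mockTestScript` fields match how the docker task engine reads the file in this repo; the inner `path` field is an assumed `RunOptions` property, and the script paths are hypothetical.

```json
{
    "init": {
        "initScript": { "path": "eng/scripts/init.sh" }
    },
    "generateAndBuild": {
        "generateAndBuildScript": { "path": "eng/scripts/generate_and_build.sh" }
    },
    "mockTest": {
        "mockTestScript": { "path": "eng/scripts/mock_test.sh" }
    }
}
```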
@ -1,54 +1,63 @@
{
  "name": "@azure-tools/sdk-generation-lib",
  "version": "1.0.4",
  "description": "",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "build": "rimraf dist && tsc && npm run copy",
    "copy": "copyfiles -u 1 src/types/taskInputAndOuputSchemaTypes/*.json dist",
    "prepack": "npm run build",
    "pack": "npm pack 2>&1",
    "test": "jest --forceExit --detectOpenHandles --coverage=true"
  },
  "author": "Microsoft",
  "license": "MIT",
  "files": [
    "dist/**/*.js",
    "dist/**/*.json",
    "dist/**/*.js.map",
    "dist/**/*.d.ts",
    "dist/**/*.d.ts.map",
    "dist/**/*.handlebars",
    "LICENSE",
    "README.md"
  ],
  "dependencies": {
    "@azure/event-hubs": "~5.5.1",
    "@azure/storage-blob": "^12.8.0",
    "@azure/swagger-validation-common": "^0.1.2",
    "@octokit/auth-app": "^2.4.5",
    "@octokit/rest": "^18.0.3",
    "ajv": "^6.12.6",
    "class-validator": "^0.13.2",
    "colors": "1.4.0",
    "convict": "^6.2.3",
    "jsonc-parser": "^3.0.0",
    "hot-shots": "^8.5.2",
    "memory-fs": "^0.5.0",
    "mongodb": "^3.6.10",
    "node-yaml": "^3.2.0",
    "typeorm": "^0.2.37",
    "winston": "^3.7.2"
  },
  "devDependencies": {
    "@types/node": "^16.11.7",
    "copyfiles": "^2.4.1",
    "rimraf": "^3.0.2",
    "jest": "~26.6.3",
    "ts-jest": "~26.5.4",
    "@types/jest": "^25.2.1",
    "typescript": "~4.6.3",
    "ts-node": "~10.7.0"
  }
  "name": "@azure-tools/sdk-generation-lib",
  "version": "1.0.4",
  "description": "",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "build": "rimraf dist && tsc && npm run copy",
    "copy": "copyfiles -u 1 src/types/taskInputAndOuputSchemaTypes/*.json dist",
    "prepack": "npm run build",
    "pack": "npm pack 2>&1",
    "test": "jest --forceExit --detectOpenHandles --coverage=true",
    "lint": "eslint . -c ../../.eslintrc.json --ignore-path ../../.eslintignore --ext .ts",
    "lint:fix": "eslint . -c ../../.eslintrc.json --ignore-path ../../.eslintignore --ext .ts --fix"
  },
  "author": "Microsoft",
  "license": "MIT",
  "files": [
    "dist/**/*.js",
    "dist/**/*.json",
    "dist/**/*.js.map",
    "dist/**/*.d.ts",
    "dist/**/*.d.ts.map",
    "dist/**/*.handlebars",
    "LICENSE",
    "README.md"
  ],
  "dependencies": {
    "@azure/event-hubs": "~5.5.1",
    "@azure/storage-blob": "^12.8.0",
    "@azure/swagger-validation-common": "^0.1.2",
    "@octokit/auth-app": "^2.4.5",
    "@octokit/rest": "^18.0.3",
    "ajv": "^6.12.6",
    "class-validator": "^0.13.2",
    "colors": "1.4.0",
    "convict": "^6.2.3",
    "jsonc-parser": "^3.0.0",
    "hot-shots": "^8.5.2",
    "memory-fs": "^0.5.0",
    "mongodb": "^3.6.10",
    "node-yaml": "^3.2.0",
    "typeorm": "^0.2.37",
    "winston": "^3.7.2"
  },
  "devDependencies": {
    "@types/node": "^16.11.7",
    "copyfiles": "^2.4.1",
    "rimraf": "^3.0.2",
    "jest": "~26.6.3",
    "ts-jest": "~26.5.4",
    "@types/jest": "^25.2.1",
    "typescript": "~4.6.3",
    "ts-node": "~10.7.0",
    "eslint": "^8.16.0",
    "@typescript-eslint/eslint-plugin": "^5.25.0",
    "eslint-config-google": "^0.14.0",
    "eslint-plugin-import": "^2.26.0",
    "eslint-plugin-n": "^15.0.0",
    "eslint-plugin-promise": "^6.0.0",
    "@typescript-eslint/parser": "^5.25.0",
    "eslint-plugin-simple-import-sort": "^7.0.0"
  }
}
@ -1,15 +1,15 @@
import * as child_process from 'child_process';
import * as childProcess from 'child_process';
import * as fs from 'fs';
import * as path from 'path';

import { AzureBlobClient } from '../utils/blob/AzureBlobClient';
import { GenerateAndBuildOutput } from '../types/taskInputAndOuputSchemaTypes/GenerateAndBuildOutput';
import { logger } from '../utils/logger';
import { SDK } from '../types/commonType';
import { GenerateAndBuildOutput } from '../types/taskInputAndOuputSchemaTypes/GenerateAndBuildOutput';
import { TaskResultStatus } from '../types/taskResult';
import { AzureBlobClient } from '../utils/blob/AzureBlobClient';
import { logger } from '../utils/logger';

function getFileListInPackageFolder(packageFolder: string) {
    const files = child_process
    const files = childProcess
        .execSync('git ls-files -cmo --exclude-standard', { encoding: 'utf8', cwd: packageFolder })
        .trim()
        .split('\n');

@ -57,7 +57,7 @@ export class ArtifactBlobUploader {
    public async uploadSourceCode(generateAndBuildOutputJson: GenerateAndBuildOutput) {
        for (const p of generateAndBuildOutputJson.packages) {
            const result = p.result;
            if (result === TaskResultStatus.failure) {
            if (result === TaskResultStatus.Failure) {
                logger.warn(`Build ${p.packageName} failed, skipped it`);
                continue;
            }

@ -80,7 +80,7 @@ export class ArtifactBlobUploader {
    public async uploadArtifacts(generateAndBuildOutputJson: GenerateAndBuildOutput) {
        for (const p of generateAndBuildOutputJson.packages) {
            const result = p.result;
            if (result === TaskResultStatus.failure) {
            if (result === TaskResultStatus.Failure) {
                logger.warn(`Build ${p.packageName} failed, skipped it`);
                continue;
            }
@ -1,17 +1,17 @@
import * as fs from 'fs';

import { Connection, createConnection } from 'typeorm';
import { AzureBlobClient } from '../utils/blob/AzureBlobClient';
import { AzureSDKTaskName, SDKPipelineStatus } from '../types/commonType';

import { CodeGeneration } from '../types/codeGeneration';
import { CodeGenerationDao } from '../utils/db/codeGenerationDao';
import { CodeGenerationDaoImpl } from '../utils/db/codeGenerationDaoImpl';
import { EventHubProducer } from '../utils/eventhub/EventHubProducer';
import { logger } from '../utils/logger';
import { AzureSDKTaskName } from '../types/commonType';
import { PipelineRunEvent } from '../types/events';
import { TaskResult, TaskResultEntity } from '../types/taskResult';
import { AzureBlobClient } from '../utils/blob/AzureBlobClient';
import { CodeGenerationDao } from '../utils/db/codeGenerationDao';
import { CodeGenerationDaoImpl } from '../utils/db/codeGenerationDaoImpl';
import { TaskResultDao } from '../utils/db/taskResultDao';
import { TaskResultDaoImpl } from '../utils/db/taskResultDaoImpl';
import { EventHubProducer } from '../utils/eventhub/EventHubProducer';
import { logger } from '../utils/logger';

export type MongoConnectContext = {
    name: string;

@ -50,7 +50,7 @@ export class ResultDBPublisher {
            ssl: this.context.ssl,
            synchronize: this.context.synchronize,
            logging: this.context.logging,
            entities: [TaskResultEntity, CodeGeneration],
            entities: [TaskResultEntity, CodeGeneration]
        });
    }

@ -101,9 +101,9 @@ export class ResultBlobPublisher {
        const logsAndResultPathArray = JSON.parse(logsAndResultPath);
        for (const file of logsAndResultPathArray) {
            if (fs.existsSync(file)) {
                let blobName: string = file.includes('.json')
                    ? `${this.pipelineBuildId}/logs/${this.sdkGenerationName}-${taskName}-result.json`
                    : `${this.pipelineBuildId}/logs/${this.sdkGenerationName}-${taskName}.log`;
                const blobName: string = file.includes('.json') ?
                    `${this.pipelineBuildId}/logs/${this.sdkGenerationName}-${taskName}-result.json` :
                    `${this.pipelineBuildId}/logs/${this.sdkGenerationName}-${taskName}.log`;
                await this.azureBlobClient.publishBlob(file, blobName);
                logger.info(`Publish ${file} Success !!!`);
            } else {
@ -1,3 +1,6 @@
import * as fs from 'fs';

import { AzureSDKTaskName } from '../types/commonType';
import { getTaskBasicConfig, TaskBasicConfig } from '../types/taskBasicConfig';
import { RunOptions } from '../types/taskInputAndOuputSchemaTypes/CodegenToSdkConfig';
import { GenerateAndBuildInput } from '../types/taskInputAndOuputSchemaTypes/GenerateAndBuildInput';

@ -6,12 +9,10 @@ import { InitOutput } from '../types/taskInputAndOuputSchemaTypes/InitOutput';
import { LiveTestInput } from '../types/taskInputAndOuputSchemaTypes/LiveTestInput';
import { MockTestInput } from '../types/taskInputAndOuputSchemaTypes/MockTestInput';
import { TestOutput } from '../types/taskInputAndOuputSchemaTypes/TestOutput';
import { TaskResultStatus, TaskResult } from '../types/taskResult';
import { TaskResult } from '../types/taskResult';
import { requireJsonc } from '../utils/requireJsonc';
import { runScript } from './runScript';
import * as fs from 'fs';
import { createTaskResult } from './generateResult';
import { AzureSDKTaskName } from '../types/commonType';
import { runScript } from './runScript';

export async function executeTask(
    taskName: AzureSDKTaskName,

@ -30,14 +31,10 @@ export async function executeTask(
        args.push(inputJsonPath);
    }
    args.push(outputJsonPath);
    const result = await runScript(runScriptOptions, {
    const execResult = await runScript(runScriptOptions, {
        cwd: cwd,
        args: args,
        args: args
    });
    let execResult: TaskResultStatus = TaskResultStatus.success;
    if (result === 'failed') {
        execResult = TaskResultStatus.failure;
    }
    if (fs.existsSync(outputJsonPath)) {
        const outputJson = requireJsonc(outputJsonPath);
        return {

@ -49,7 +46,7 @@ export async function executeTask(
                runScriptOptions.logFilter,
                outputJson
            ),
            output: outputJson,
            output: outputJson
        };
    } else {
        return {

@ -61,7 +58,7 @@ export async function executeTask(
                runScriptOptions.logFilter,
                undefined
            ),
            output: undefined,
            output: undefined
        };
    }
}
@ -1,18 +1,19 @@
import * as fs from 'fs';

import { AzureSDKTaskName } from '../types/commonType';
import { LogFilter } from '../types/taskInputAndOuputSchemaTypes/CodegenToSdkConfig';
import { TestOutput } from '../types/taskInputAndOuputSchemaTypes/TestOutput';
import {
    TaskResultCommon,
    MessageRecord,
    RawMessageRecord,
    TaskOutput,
    TaskResult,
    TaskResultCommon,
    TaskResultStatus,
    TestTaskResult,
    TestTaskResult
} from '../types/taskResult';
import { logger } from '../utils/logger';
import { isLineMatch } from './runScript';
import * as fs from 'fs';

const logSeparatorLength = 26; // length of '20xx-xx-xx xx:xx:xx cmdout'
const timestampLength = 19; // length of '20xx-xx-xx xx:xx:xx'

@ -62,7 +63,7 @@ export function parseGenerateLog(
                level: 'Error',
                message: line,
                time: new Date(line.substring(0, timestampLength)),
                type: 'Raw',
                type: 'Raw'
            };
            messages.push(message);
        } else if (isLineMatch(line.toLowerCase(), logWarningFilter)) {

@ -71,7 +72,7 @@ export function parseGenerateLog(
                level: 'Warning',
                message: line,
                time: new Date(line.substring(0, timestampLength)),
                type: 'Raw',
                type: 'Raw'
            };
            messages.push(message);
        }

@ -85,7 +86,7 @@ export function parseGenerateLog(
        pipelineBuildId: pipelineBuildId,
        errorCount: errorNum,
        warningCount: warnNum,
        messages: messages,
        messages: messages
    };

    return result;

@ -100,13 +101,13 @@ export function createTaskResult(
    taskOutput: TaskOutput
): TaskResult {
    let commonResult: TaskResultCommon = undefined;
    if (taskExeResult === TaskResultStatus.success) {
    if (taskExeResult === TaskResultStatus.Success) {
        commonResult = {
            name: taskname,
            pipelineBuildId: pipelineBuildId,
            result: taskExeResult,
            errorCount: 0,
            warningCount: 0,
            warningCount: 0
        };
    } else {
        commonResult = parseGenerateLog(pipelineBuildId, taskname, logfile, logFilter);

@ -121,7 +122,7 @@ export function createTaskResult(
            apiCoverage: 0,
            codeCoverage: 0,
            result: taskExeResult,
            ...commonResult,
            ...commonResult
        };
    }
    const testOutput: TestOutput = taskOutput as TestOutput;

@ -132,7 +133,7 @@ export function createTaskResult(
        apiCoverage: testOutput.apiCoverage,
        codeCoverage: testOutput.codeCoverage,
        result: taskExeResult,
        ...commonResult,
        ...commonResult
    };
    return testTaskResult;
}
@ -4,7 +4,7 @@ import {
    getCodegenToSdkConfig,
    InitOptions,
    LiveTestOptions,
    MockTestOptions,
    MockTestOptions
} from '../types/taskInputAndOuputSchemaTypes/CodegenToSdkConfig';
import { requireJsonc } from '../utils/requireJsonc';
@ -1,10 +1,14 @@
import { RunOptions } from '../types/taskInputAndOuputSchemaTypes/CodegenToSdkConfig';
import * as path from 'path';
import { spawn } from 'child_process';
import { logger } from '../utils/logger';
import { Readable } from 'stream';
import { scriptRunningState } from '../types/scriptRunningState';
import * as fs from 'fs';
import * as path from 'path';
import { Readable } from 'stream';
import { Logger } from 'winston';

import { StringMap, TaskResultStatus } from '../types';
import { RunOptions } from '../types/taskInputAndOuputSchemaTypes/CodegenToSdkConfig';
import { logger as globalLogger } from '../utils/logger';

let logger = globalLogger;

export const isLineMatch = (line: string, filter: RegExp | undefined) => {
    if (filter === undefined) {

@ -14,12 +18,16 @@ export const isLineMatch = (line: string, filter: RegExp | undefined) => {
    return filter.exec(line) !== null;
};

const listenOnStream = (prefix: string, stream: Readable, logType: 'cmdout' | 'cmderr') => {
const listenOnStream = (
    prefix: string,
    stream: Readable,
    logType: 'cmdout' | 'cmderr'
) => {
    const addLine = (line: string) => {
        if (line.length === 0) {
            return;
        }
        logger.log(logType, `${prefix} ${line}`, { show: true });
        logger.log(logType, `${prefix} ${line}`);
    };

    stream.on('data', (data) => {

@ -27,25 +35,29 @@ const listenOnStream = (prefix: string, stream: Readable, logType: 'cmdout' | 'c
    });
};

export async function runScript(
    runOptions: RunOptions,
    options: {
        cwd: string;
        args?: string[];
export async function runScript(runOptions: RunOptions, options: {
    cwd: string;
    args?: string[];
    envs?: StringMap<string | boolean | number>;
    customizedLogger?: Logger;
}): Promise<TaskResultStatus> {
    if (!!options?.customizedLogger) {
        logger = options.customizedLogger;
    }
): Promise<string> {
    let executeResult: scriptRunningState;

    let executeResult: TaskResultStatus;
    const scriptCmd = runOptions.script;
    const scriptPath = runOptions.path.trim();
    const env = { PWD: path.resolve(options.cwd), ...process.env };
    const env = { ...process.env, PWD: path.resolve(options.cwd), ...options.envs };

    for (const e of runOptions.envs) {
        env[e] = process.env[e];
    }
    let cmdRet: { code: number | null; signal: NodeJS.Signals | null } = {
        code: null,
        signal: null,
        signal: null
    };
    logger.log('cmdout', 'task script path:' + path.join(options.cwd, scriptPath));
    logger.log('cmdout', 'task script path:' + path.join(options.cwd, scriptPath) );
    if (fs.existsSync(path.join(options.cwd, scriptPath))) {
        logger.log('cmdout', 'chmod');
        fs.chmodSync(path.join(options.cwd, scriptPath), '777');

@ -53,7 +65,7 @@ export async function runScript(

    try {
        let command: string = '';
        let args: string[] = [];
        let args:string[] = [];
        const scriptPaths: string[] = scriptPath.split(' ');
        if (scriptCmd !== undefined && scriptCmd.length > 0) {
            command = scriptCmd;

@ -67,7 +79,7 @@ export async function runScript(
            cwd: options.cwd,
            shell: false,
            stdio: ['ignore', 'pipe', 'pipe'],
            env,
            env
        });
        const prefix = `[${runOptions.logPrefix ?? path.basename(scriptPath)}]`;
        listenOnStream(prefix, child.stdout, 'cmdout');

@ -79,31 +91,19 @@ export async function runScript(
            });
        });
        if (cmdRet.code === 0) {
            executeResult = 'succeeded';
            executeResult = TaskResultStatus.Success;
        } else {
            executeResult = 'failed';
            executeResult = TaskResultStatus.Failure;
        }
    } catch (e) {
        cmdRet.code = -1;
        logger.error(`${e.message}\n${e.stack}`);
        executeResult = 'failed';
        executeResult = TaskResultStatus.Failure;
    }
    let storeLog = false;
    if ((cmdRet.code !== 0 || cmdRet.signal !== null) && runOptions.exitWithNonZeroCode !== undefined) {
        if (runOptions.exitWithNonZeroCode.storeLog) {
            storeLog = true;
        }
        if (runOptions.exitWithNonZeroCode.result === 'error') {
            executeResult = 'failed';
        } else if (runOptions.exitWithNonZeroCode.result === 'warning') {
            executeResult = 'warning';
        }
    if (cmdRet.code !== 0 || cmdRet.signal !== null) {
        executeResult = TaskResultStatus.Failure;
        const message = `Script return with result [${executeResult}] code [${cmdRet.code}] signal [${cmdRet.signal}] cwd [${options.cwd}]: ${scriptPath}`;
        if (runOptions.exitWithNonZeroCode.result === 'error') {
            logger.error(message, { show: storeLog });
        } else if (runOptions.exitWithNonZeroCode.result === 'warning') {
            logger.warn(message, { show: storeLog });
        }
        logger.log('cmderr', message);
    }
    return executeResult;
}
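To make the reworked signature concrete, here is a hypothetical call site exercising the new options. The paths, env-var names, and values are placeholders, and `initializeLogger` is the helper defined in the logger module later in this commit:

```typescript
// Hypothetical call site for the reworked runScript; all concrete values are illustrative.
import { initializeLogger } from '../utils/logger';
import { runScript } from './runScript';

async function runGenerateAndBuild() {
    const taskLogger = initializeLogger('/tmp/sdk-generation/generateAndBuild.log', 'generateAndBuild');
    const status = await runScript(
        { path: 'eng/scripts/generate.sh', envs: ['PATH', 'SHELL'], logPrefix: 'generate' },
        {
            cwd: '/tmp/sdk-repo',
            args: ['/tmp/output/generateAndBuildOutput.json'],
            envs: { DEBUG_MODE: true },  // merged into the child process environment
            customizedLogger: taskLogger // cmdout/cmderr lines are routed to this logger
        }
    );
    return status; // TaskResultStatus.Success or TaskResultStatus.Failure
}
```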
@ -13,49 +13,49 @@ export class CodeGeneration {
    }

    @ObjectIdColumn()
    id: number;

    @Index({ unique: true })
    @Column()
    @IsNotEmpty()
    name: string;

    @Column()
    @IsNotEmpty()
    service: string;

    @Column()
    @IsNotEmpty()
    serviceType: string;

    @Column()
    resourcesToGenerate: string;

    @Column()
    tag: string;

    @Column()
    @IsNotEmpty()
    sdk: string;

    @Column()
    @IsNotEmpty()
    swaggerRepo: string;

    @Column()
    @IsNotEmpty()
    sdkRepo: string;

    @Column()
    @IsNotEmpty()
    codegenRepo: string;

    @Column()
    @IsNotEmpty()
    type: string;

    @Column()
    ignoreFailure: string;

    @Column()
    stages: string;

    @Column({ default: '' })
    lastPipelineBuildID: string;

    @Column()
    swaggerPR: string;

    @Column()
    codePR: string;

    @Column()
    @IsNotEmpty()
    status: string;

    @Column({ default: '' })
    owner: string;
}
@ -1,4 +1,3 @@
export * from './scriptRunningState';
export * from './taskBasicConfig';
export * from './taskResult';
export * from './taskInputAndOuputSchemaTypes';

@ -1,24 +0,0 @@
const scriptRunningStateStrings = {
    /**
     * The process of running the script has not yet begun.
     */
    pending: `Pending`,
    /**
     * The process of running the script is in progress.
     */
    inProgress: `In-Progress`,
    /**
     * The process of running the script has failed.
     */
    failed: `Failed`,
    /**
     * The process of running the script has succeeded.
     */
    succeeded: `Succeeded`,
    /**
     * The process of running the script has warnings.
     */
    warning: `Warning`
};

export type scriptRunningState = keyof typeof scriptRunningStateStrings;
@ -19,62 +19,62 @@ export const taskBasicConfig = {
    sdkRepo: {
        default: '',
        env: 'SDK_REPO',
        format: String,
        format: String
    },
    configPath: {
        default: 'eng/codegen_to_sdk_config.json',
        env: 'CONFIG_PATH',
        format: String,
        format: String
    },
    pipelineId: {
        default: '',
        env: 'PIPELINE_ID',
        format: String,
        format: String
    },
    queuedAt: {
        default: '',
        env: 'QUEUE_AT',
        format: String,
        format: String
    },
    pipeLog: {
        default: '/tmp/sdk-generation/pipe.log',
        env: 'PIPE_LOG',
        format: String,
        format: String
    },
    pipeFullLog: {
        default: '/tmp/sdk-generation/pipe.full.log',
        env: 'PIPE_FULL_LOG',
        format: String,
        format: String
    },
    mockServerLog: {
        default: '',
        env: 'MOCK_SERVER_LOG',
        format: String,
        format: String
    },
    sdkGenerationName: {
        default: '',
        env: 'SDK_GENERATION_NAME',
        format: String,
        format: String
    },
    buildId: {
        default: '',
        env: 'BUILD_ID',
        format: String,
        format: String
    },
    taskName: {
        default: '',
        env: 'TASK_NAME',
        format: String,
        format: String
    },
    azureStorageBlobSasUrl: {
        default: '',
        env: 'AZURE_STORAGE_BLOB_SAS_URL',
        format: String,
        format: String
    },
    azureBlobContainerName: {
        default: 'sdks',
        env: 'AZURE_BLOB_CONTAINER_NAME',
        format: String,
    },
        format: String
    }
};
export const getTaskBasicConfig = convict<TaskBasicConfig>(taskBasicConfig);
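For orientation, reading this convict-based config elsewhere in the lib looks roughly like the sketch below; the printed field is real, but treating it as a standalone snippet is an assumption:

```typescript
// Sketch of consuming the convict config; values come from env vars such as SDK_REPO or PIPE_FULL_LOG.
import { getTaskBasicConfig, TaskBasicConfig } from '../types/taskBasicConfig';

const config: TaskBasicConfig = getTaskBasicConfig.getProperties();
// Defaults to '/tmp/sdk-generation/pipe.full.log' unless PIPE_FULL_LOG is set in the environment.
console.log(config.pipeFullLog);
```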
@ -1,6 +1,7 @@
import * as path from 'path';

import { requireJsonc } from '../../utils/requireJsonc';
import { getTypeTransformer } from '../../utils/validator';
import * as path from 'path';

export const codegenToSdkConfigSchema = requireJsonc(path.join(__dirname, 'CodegenToSdkConfigSchema.json'));

@ -10,10 +11,6 @@ export type RunOptions = {
    envs?: string[];
    logPrefix?: string;
    logFilter?: LogFilter;
    exitWithNonZeroCode?: {
        storeLog: boolean;
        result: 'error' | 'warning' | 'ignore';
    };
};

export type LogFilter = {
@ -73,10 +73,9 @@
            "type": "string"
        },
        "envs": {
            // Not implemented.
            // Extra environment variables to be passed to the script (except initScript).
            // By default the following envs will be passed:
            // PATH, SHELL, PWD (current directory)
            // PWD (current directory)
            "type": "array",
            "items": {
                "type": "string"

@ -90,26 +89,6 @@
        "logFilter": {
            // Filter for error messages and warning messages.
            "$ref": "#/definitions/LogFilter"
        },
        "exitWithNonZeroCode": {
            "properties": {
                // How SDK Automation should handle a non-zero exit code.
                "storeLog": {
                    // Should we store this error.
                    "type": "boolean",
                    "default": true
                },
                "result": {
                    // If the script exits with a non-zero code, how we should mark the script's result.
                    "type": "string",
                    "enum": ["error", "warning", "ignore"],
                    "default": "error"
                }
            },
            "storeAllLog": {
                "show": true,
                "result": "error"
            }
        }
    },
    "required": ["path"]
@ -17,6 +17,5 @@
        "codeCoverage": {
            "type": "number"
        }
    },
    "required": ["total", "success", "fail", "apiCoverage", "codeCoverage"]
}
}
@ -1,25 +1,26 @@
import * as fs from 'fs';
import { Column, Entity, ObjectIdColumn } from 'typeorm';

import { getTaskBasicConfig, TaskBasicConfig } from './taskBasicConfig';
import { GenerateAndBuildOutput } from './taskInputAndOuputSchemaTypes/GenerateAndBuildOutput';
import { InitOutput } from './taskInputAndOuputSchemaTypes/InitOutput';
import { TestOutput } from './taskInputAndOuputSchemaTypes/TestOutput';
import { getTaskBasicConfig, TaskBasicConfig } from './taskBasicConfig';
import * as fs from 'fs';
import { Column, Entity, ObjectIdColumn } from 'typeorm';

@Entity('sdkGenerationResults')
export class TaskResultEntity {
    @ObjectIdColumn()
    id: string;

    @Column()
    key: string;

    @Column()
    pipelineBuildId: string;

    @Column()
    taskResult: TaskResult;
}

export enum TaskResultStatus {
    success = 'succeeded',
    failure = 'failed',
    Success = 'succeeded',
    Failure = 'failed',
}

export type Extra = {

@ -101,9 +102,9 @@ export function setTaskResult(config: TaskBasicConfig, taskName: string) {
    taskResult = {
        name: taskName,
        pipelineBuildId: '',
        result: TaskResultStatus.success,
        result: TaskResultStatus.Success,
        errorCount: 0,
        warningCount: 0,
        warningCount: 0
    };
}

@ -119,18 +120,18 @@ export function generateTotalResult(taskResults: TaskResult[], pipelineBuildId:
    const totalResult: TaskResult = {
        name: 'total',
        pipelineBuildId: pipelineBuildId,
        result: TaskResultStatus.success,
        result: TaskResultStatus.Success,
        errorCount: 0,
        messages: [],
        messages: []
    };

    if (taskResults.length === 0) {
        totalResult.result = TaskResultStatus.failure;
        totalResult.result = TaskResultStatus.Failure;
        return totalResult;
    }

    for (const taskResult of taskResults) {
        if (taskResult.result !== TaskResultStatus.success) {
        if (taskResult.result !== TaskResultStatus.Success) {
            totalResult.result = taskResult.result;
        }
        totalResult.errorCount += taskResult.errorCount;
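A small illustration of how `generateTotalResult` folds per-task results. The inputs are hypothetical, the import path is illustrative, and the objects are trimmed (hence the cast) because `TaskResult` has more optional fields than shown here:

```typescript
// Hypothetical aggregation example for generateTotalResult.
import { generateTotalResult, TaskResultStatus } from '../types/taskResult';

const total = generateTotalResult(
    [
        { name: 'init', pipelineBuildId: '42', result: TaskResultStatus.Success, errorCount: 0 },
        { name: 'generateAndBuild', pipelineBuildId: '42', result: TaskResultStatus.Failure, errorCount: 2 }
    ] as any, // trimmed TaskResult objects for illustration only
    '42'
);
// total.result === TaskResultStatus.Failure, total.errorCount === 2
```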
@ -1,8 +1,8 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License in the project root for license information.
import { BlobServiceClient } from "@azure/storage-blob";
import { BlobServiceClient } from '@azure/storage-blob';

import { logger } from "../logger";
import { logger } from '../logger';

export class AzureBlobClient {
    private blobServiceClient: BlobServiceClient;

@ -1 +1 @@
export * from "./AzureBlobClient";
export * from './AzureBlobClient';
@ -1,7 +1,7 @@
import { Connection, MongoRepository } from 'typeorm';

import { CodeGenerationDao } from './codeGenerationDao';
import { CodeGeneration } from '../../types/codeGeneration';
import { CodeGenerationDao } from './codeGenerationDao';

export class CodeGenerationDaoImpl implements CodeGenerationDao {
    private repo: MongoRepository<CodeGeneration>;

@ -40,7 +40,7 @@ export class CodeGenerationDaoImpl implements CodeGenerationDao {
        await this.repo.delete(codegen);
    }

    /*Get all code generations of a special onboard type. */
    /* Get all code generations of a special onboard type. */
    public async listCodeGenerations(filters: any = undefined, filterCompleted = false): Promise<CodeGeneration[]> {
        let finalFilters: any;
        if (!filters) {

@ -48,7 +48,7 @@ export class CodeGenerationDaoImpl implements CodeGenerationDao {
        }
        if (filterCompleted) {
            finalFilters = {
                where: { $and: [{ status: { $ne: 'completed' } }, { status: { $ne: 'pipelineCompleted' } }, filters] },
                where: { $and: [{ status: { $ne: 'completed' } }, { status: { $ne: 'pipelineCompleted' } }, filters] }
            };
        } else {
            finalFilters = filters;
@ -1,4 +1,4 @@
export * from "./codeGenerationDao";
export * from "./codeGenerationDaoImpl";
export * from "./taskResultDao";
export * from "./taskResultDaoImpl";
export * from './codeGenerationDao';
export * from './codeGenerationDaoImpl';
export * from './taskResultDao';
export * from './taskResultDaoImpl';
@ -1,7 +1,8 @@
import { TaskResultDao } from './taskResultDao';
import { TaskResult, TaskResultEntity } from '../../types/taskResult';
import { Connection, MongoRepository } from 'typeorm';

import { TaskResult, TaskResultEntity } from '../../types/taskResult';
import { TaskResultDao } from './taskResultDao';

export class TaskResultDaoImpl implements TaskResultDao {
    private repo: MongoRepository<TaskResultEntity>;

@ -11,7 +12,7 @@ export class TaskResultDaoImpl implements TaskResultDao {
    public async getFromBuild(pipelineBuildId: string): Promise<TaskResult[]> {
        const taskResults: TaskResultEntity[] = await this.repo.find({
            pipelineBuildId: pipelineBuildId,
            pipelineBuildId: pipelineBuildId
        });
        const results: TaskResult[] = [];
        for (const taskResult of taskResults) {

@ -27,7 +28,7 @@ export class TaskResultDaoImpl implements TaskResultDao {
            {
                key: key,
                pipelineBuildId: pipelineBuildId,
                taskResult: taskResult,
                taskResult: taskResult
            },
            { upsert: true }
        );
@ -2,11 +2,11 @@
// Licensed under the MIT License. See License in the project root for license information.
import {
    CreateBatchOptions,
    EventHubProducerClient,
    EventDataBatch,
} from "@azure/event-hubs";
    EventHubProducerClient
} from '@azure/event-hubs';

import { logger } from "../logger";
import { logger } from '../logger';

export class EventHubProducer {
    private producer: EventHubProducerClient;

@ -23,7 +23,7 @@ export class EventHubProducer {
        return await this.producer.createBatch(batchOptions);
    }

    private async *getBatchIterator(events: string[], partitionKey?: string) {
    private async* getBatchIterator(events: string[], partitionKey?: string) {
        let toAddIndex = 0;
        if (toAddIndex >= events.length) {
            return;

@ -56,19 +56,19 @@ export class EventHubProducer {
        let next = await batchIterator.next();
        while (!next.done) {
            if (next.value !== undefined) {
                let batch: EventDataBatch = next.value as EventDataBatch;
                const batch: EventDataBatch = next.value as EventDataBatch;
                await this.producer.sendBatch(batch);
            }
            next = await batchIterator.next();
        }
        logger.info("Send events done");
        logger.info('Send events done');
    }

    public async close() {
        try {
            await this.producer.close();
        } catch (err) {
            logger.error("Error when closing client: ", err);
            logger.error('Error when closing client: ', err);
        } // swallow the error
    }
}

@ -1 +1 @@
export * from "./EventHubProducer";
export * from './EventHubProducer';
@ -1,7 +1,7 @@
export * from "./logger";
export * from "./requireJsonc";
export * from "./validator";
export * from "./blob";
export * from "./db";
export * from "./eventhub";
export * from "./metric";
export * from './logger';
export * from './requireJsonc';
export * from './validator';
export * from './blob';
export * from './db';
export * from './eventhub';
export * from './metric';
@ -1,86 +1,83 @@
import * as winston from 'winston';
import { getTaskBasicConfig, TaskBasicConfig } from '../types/taskBasicConfig';
import { createLogger, format, Logger, transports } from 'winston';
import { FileTransportInstance } from 'winston/lib/winston/transports';

function getLogger() {
    const config: TaskBasicConfig = getTaskBasicConfig.getProperties();
    const sdkAutoLogLevels = {
        levels: {
            error: 0,
            warn: 1,
            section: 5, // Log as azure devops section
            command: 6, // Running a command
            cmdout: 7, // Command stdout
            cmderr: 8, // Command stderr
            info: 15,
            endsection: 20,
            debug: 50,
        },
        colors: {
            error: 'red',
            warn: 'yellow',
            info: 'green',
            cmdout: 'green underline',
            cmderr: 'yellow underline',
            section: 'magenta bold',
            endsection: 'magenta bold',
            command: 'cyan bold',
            debug: 'blue',
        },
    };
import { getTaskBasicConfig } from '../types';

    const logger = winston.createLogger({
        levels: sdkAutoLogLevels.levels,
const loggerLevels = {
    levels: {
        error: 0,
        warn: 1,
        cmdout: 2,
        cmderr: 3,
        info: 4,
        debug: 5
    },
    colors: {
        error: 'red',
        warn: 'yellow',
        cmdout: 'green underline',
        cmderr: 'yellow underline',
        info: 'green',
        debug: 'blue'
    }
};

type WinstonInfo = {
    level: keyof typeof loggerLevels.levels;
    message: string;
    timestamp: string;
};

const fileTransportInstances: {
    [key: string]: FileTransportInstance
} = {};

export function addFileLog(logger: Logger, logPath: string, taskName: string) {
    const fileTransportInstance = new transports.File({
        level: 'info',
        filename: logPath,
        options: { flags: 'w' },
        format: format.combine(
            format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
            format.printf((info: WinstonInfo) => {
                const msg = `${info.timestamp} ${info.level} \t${info.message}`;
                return msg;
            })
        )
    });
    fileTransportInstances[taskName] = fileTransportInstance;
    logger.add(fileTransportInstance);
}

export function removeFileLog(logger: Logger, taskName: string) {
    if (!fileTransportInstances[taskName]) {
        throw new Error(`Try to remove non-existed logger transport: ${taskName}`);
    }
    logger.remove(fileTransportInstances[taskName]);
}

export function initializeLogger(logPath: string, taskName: string, addConsoleLog: boolean = true): Logger {
    const logger = createLogger({
        levels: loggerLevels.levels
    });

    type WinstonInfo = {
        level: keyof typeof sdkAutoLogLevels.levels;
        message: string;
        timestamp: string;
        storeLog?: boolean;
    };
    addFileLog(logger, logPath, taskName);

    logger.add(
        new winston.transports.File({
    if (addConsoleLog) {
        logger.add(new transports.Console({
            level: 'info',
            filename: config.pipeFullLog,
            options: { flags: 'w' },
            format: winston.format.combine(
                winston.format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
                winston.format.printf((info: WinstonInfo) => {
            format: format.combine(
                format.colorize({ colors: loggerLevels.colors }),
                format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
                format.printf((info: WinstonInfo) => {
                    const msg = `${info.timestamp} ${info.level} \t${info.message}`;
                    return msg;
                })
            ),
        })
    );

    logger.add(
        new winston.transports.Console({
            level: 'endsection',
            format: winston.format.combine(
                winston.format.colorize({ colors: sdkAutoLogLevels.colors }),
                winston.format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
                winston.format.printf((info: WinstonInfo) => {
                    const { level } = info;
                    let msg = `${info.timestamp} ${info.level} \t${info.message}`;
                    switch (level) {
                        case 'error':
                        case 'debug':
                        case 'command':
                            msg = `##[${level}] ${msg}`;
                        case 'warn':
                            msg = `##[warning] ${msg}`;
                        case 'section':
                            msg = `##[group] ${info.message}`;
                        case 'endsection':
                            msg = `##[endgroup] ${info.message}`;
                    }
                    return msg;
                })
            ),
        })
    );
            )
        }));
    }
    return logger;
}

export const logger: winston.Logger = getLogger();
// Export a default logger that can be used by pipeline commands.
export const logger = initializeLogger(getTaskBasicConfig?.getProperties()?.pipeFullLog, 'pipeline');
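A hypothetical per-task use of the new logger helpers, based on the signatures above; the task names and file paths are made up:

```typescript
// Sketch: attach a per-task file transport, log through it, then detach it.
import { addFileLog, initializeLogger, removeFileLog } from '../utils/logger';

const taskLogger = initializeLogger('/tmp/sdk-generation/init.log', 'init');
taskLogger.info('init task started');

// A second file transport can be attached to the same logger under another task name.
addFileLog(taskLogger, '/tmp/sdk-generation/init.full.log', 'init-full');
taskLogger.info('this line goes to both files');
removeFileLog(taskLogger, 'init-full'); // throws if the named transport was never added
```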
@ -1,14 +1,14 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License in the project root for license information.
import * as statsd from "hot-shots";
import * as statsd from 'hot-shots';

export enum Metrics {
    Liveness = "liveness",
    ApiCalls = "apiCalls",
    InternalServerError = "InternalServerError",
    BadRequest = "BadRequest",
    NotFound = "NotFound",
    Success = "success",
    Liveness = 'liveness',
    ApiCalls = 'apiCalls',
    InternalServerError = 'InternalServerError',
    BadRequest = 'BadRequest',
    NotFound = 'NotFound',
    Success = 'success',
}

export class MonitorClient {

@ -17,7 +17,7 @@ export class MonitorClient {
        this.stats = new statsd.StatsD({
            host: host,
            port: port,
            mock: mock,
            mock: mock
        });
    }

@ -45,7 +45,7 @@ export class MonitorClient {
        const stat = JSON.stringify({
            Namespace: serviceName,
            Metric: metric,
            Dims: dims,
            Dims: dims
        });
        this.stats.gauge(stat, value);
    }

@ -1 +1 @@
export * from "./MonitorClient";
export * from './MonitorClient';
@ -2,7 +2,7 @@ import * as fs from 'fs';
import { parse } from 'jsonc-parser';

export const requireJsonc = (path: string) => {
    const contentStr = fs.readFileSync(path).toString();
    const content = parse(contentStr);
    return content;
};
@ -1,25 +1,25 @@
import {ValidateFunction} from "ajv";
import { ValidateFunction } from 'ajv';

const Ajv = require("ajv")
const ajvInstance = require('ajv');

const ajv = new Ajv({
    coerceTypes: true,
    messages: true,
    verbose: true,
    useDefaults: true
const ajv = new ajvInstance({
    coerceTypes: true,
    messages: true,
    verbose: true,
    useDefaults: true
});

export const getTypeTransformer = <T>(schema: object, name: string) => {
    let validator: ValidateFunction | undefined;
    return (obj: unknown) => {
        if (validator === undefined) {
            validator = ajv.compile(schema);
        }
        if (!validator(obj)) {
            const error = validator.errors![0];
            throw new Error(`Invalid ${name}: ${error.dataPath} ${error.message}`);
        }

        return obj as T;
    };
};
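A hypothetical use of `getTypeTransformer`, mirroring how the config types in this package are validated. The schema file name, target type, and import paths are illustrative only:

```typescript
// Sketch: compile-once JSON-schema validation for a config type.
import * as path from 'path';

import { requireJsonc } from '../requireJsonc';
import { getTypeTransformer } from '../validator';

type DemoConfig = { path: string; logPrefix?: string }; // hypothetical config shape

const demoSchema = requireJsonc(path.join(__dirname, 'DemoConfigSchema.json')); // hypothetical schema file
const toDemoConfig = getTypeTransformer<DemoConfig>(demoSchema, 'DemoConfig');

// Throws `Invalid DemoConfig: ...` if the object does not match the schema.
const config = toDemoConfig({ path: 'eng/scripts/init.sh' });
```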
@ -1,8 +1,8 @@
import { Connection, createConnection } from 'typeorm';

import { CodeGeneration } from '../../src/types/codeGeneration';
import { CodeGenerationDao } from '../../src/utils/db/codeGenerationDao';
import { CodeGenerationDaoImpl } from '../../src/utils/db/codeGenerationDaoImpl';
import { CodeGeneration } from '../../src/types/codeGeneration';

let mongoDbConnection: Connection;

@ -17,7 +17,7 @@ async function initDaoTest() {
        database: 'admin',
        synchronize: true,
        logging: true,
        entities: [CodeGeneration],
        entities: [CodeGeneration]
    });
}
@ -1,8 +1,8 @@
import * as fs from 'fs';

import { createTaskResult, parseGenerateLog, spliteLog } from '../../src/lib/generateResult';
import { AzureSDKTaskName } from '../../src/types/commonType';
import { spliteLog, parseGenerateLog, createTaskResult } from '../../src/lib/generateResult';
import { TaskResultCommon, TaskResult, TestTaskResult } from '../../src/types/taskResult';
import { TaskResult, TaskResultCommon, TestTaskResult } from '../../src/types/taskResult';

test('spliteLog', async () => {
    // Standard use case: single line

@ -30,7 +30,7 @@ test('parseGenerateLog', async () => {
        fs.unlinkSync(parseGenerateLogTestFile);
    }
    fs.writeFileSync(parseGenerateLogTestFile, correctStr, {
        encoding: 'utf-8',
        encoding: 'utf-8'
    });
    const correctResult: TaskResultCommon = parseGenerateLog('testId', 'init', parseGenerateLogTestFile, undefined);
    expect(correctResult.name).toBe('init');

@ -53,7 +53,7 @@ test('createTaskResult', async () => {
        fs.unlinkSync(createTaskResultTestFile);
    }
    fs.writeFileSync(createTaskResultTestFile, correctStr, {
        encoding: 'utf-8',
        encoding: 'utf-8'
    });

    const correctResult: TaskResult = createTaskResult(
@ -0,0 +1,25 @@
#!/bin/sh

SPEC_REPO=/spec-repo
WORK_DIR=/work-dir
SDK_REPO=/sdk-repo

if [ -d "${SPEC_REPO}" ]; then
    while true
    do
        if [ -f "/tmp/notExit" ]; then
            USER_GROUP_ID=`stat -c "%u:%g" ${SPEC_REPO}`
            if [ -d "${WORK_DIR}" ]; then
                chown -R ${USER_GROUP_ID} ${WORK_DIR}
            fi
            if [ -d "${SDK_REPO}" ]; then
                chown -R ${USER_GROUP_ID} ${SDK_REPO}
            fi
        fi
        sleep 5s
    done
else
    echo "Error: '${SPEC_REPO}' NOT found."
    exit 1
fi
@ -0,0 +1,9 @@
#!/usr/bin/env bash
set -e
dockerd > /dev/null 2>&1 &
sh /change-owner.sh &
run-mock-host "$@" &
docker-cli "$@"
if [ -f "/tmp/notExit" ]; then
    bash
fi
@ -0,0 +1,16 @@
#!/bin/sh
set -e

commit_sha="da15b6fd3ef856477bf6f4fb29ba1b7af717770d"
archive="vscode-server-linux-x64.tar.gz"

# Download VS Code Server tarball to tmp directory.
curl -L "https://update.code.visualstudio.com/commit:${commit_sha}/server-linux-x64/stable" -o "/tmp/${archive}"

mkdir -vp ~/.vscode-server/bin/"${commit_sha}"

tar --no-same-owner -xzv --strip-components=1 -C ~/.vscode-server/bin/"${commit_sha}" -f "/tmp/${archive}"

sh /root/.vscode-server/bin/${commit_sha}/bin/code-server --install-extension vscjava.vscode-java-pack
sh /root/.vscode-server/bin/${commit_sha}/bin/code-server --install-extension ms-dotnettools.csharp
sh /root/.vscode-server/bin/${commit_sha}/bin/code-server --install-extension ms-python.python
@ -0,0 +1,3 @@
#!/bin/sh
set -e
docker-cli "$@"