* update

* Update CLITest.yml

* update

* Update send_to_kusto.py

* Update send_to_kusto.py

* Update send_to_kusto.py

* update

* Update send_to_kusto.py

* update

* update

* update

* update

* update

* update

* Update clean.py

* Update

* Update CLITest.yml

* Update clean.py

* Update send_to_kusto.py

* Update generate_index.py

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* update

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* update

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update CLITest.yml

* Update clean.py

* Update CLITest.yml

* Update CLITest.yml

* Update sendemail.py

* Update CLITest.yml

* update
This commit is contained in:
ZelinWang 2023-08-03 13:50:34 +08:00 коммит произвёл GitHub
Родитель 9d3d3c03fb
Коммит b5ab35203e
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
6 изменённых файлов: 2637 добавлений и 830 удалений

Просмотреть файл

@ -2,3 +2,4 @@ variables:
ubuntu_pool: 'pool-ubuntu-2004'
windows_pool: 'pool-windows-2019'
ubuntu_arm64_pool: 'ubuntu-arm64-2004-pool'
macos_pool: 'macOS-12'

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -3,77 +3,322 @@
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Azure Killer
# version 0.1
# Clean Azure resources automatically
# Feiyue Yu
import os
import json
# version 1.0
import datetime
# Please set parameters
# Subscription ID
subscription = '0b1f6471-1bf0-4dda-aec3-cb9272f09590'
# Prefix of resource group that will be deleted
prefixes = ['cli_test', 'clitest']
# Maximum survival time, in days
TTL = 1
import json
import os
import subprocess
import time
from tqdm import tqdm
# NOTE(review): this span is a rendered diff -- the old v0.1 body
# (subscription scan + clean_rg) and the new v1.0 body (the clean_* pipeline)
# are interleaved without +/- markers, and indentation was stripped by the
# page extraction. Code kept byte-identical; comments only.
def main():
# Entry point: run the Azure test-resource clean-up pipeline.
# --- old v0.1 body (removed in this commit) ---
print('Azure Killer, version 0.1')
print('Configuration:')
print(' Subscription: ' + subscription)
# print(' Resource group prefix: ' + str(prefixes))
# print(' Maximum survival time: %d days' % TTL)
print()
cmd = 'az group list --subscription %s --query [].name'
result = os.popen(cmd % subscription).read()
rgs = json.loads(result)
for rg in rgs:
clean_rg(rg)
# --- new v1.0 body (added in this commit): clean each resource class in turn ---
print('Azure cli resource clean up: version 1.0')
clean_lock()
clean_sig()
clean_storage()
clean_servicebus()
clean_backup()
clean_deleted_keyvault()
# NOTE(review): old v0.1 helper, removed by this commit. The visible body
# only builds the delete command; the line that executed it is not shown
# in this diff fragment -- do not assume the command ever ran as-is.
def clean_rg(rg):
"""
Clean resource group.
:param rg: Resource group name
:return:
"""
print('Processing resource group: ' + rg)
cmd = 'az group delete -y -g %s --subscription %s' % (rg, subscription)
def clean_lock():
    """Delete management locks left behind on CLI-test resource groups.

    Pass 1 lists all locks and removes group-level locks for resource groups
    tagged ``product=azurecli``; pass 2 re-lists and removes the remaining
    resource-level locks, deriving the resource name/type from the lock id.

    Side effects only (runs ``az`` CLI commands); returns ``None``.
    """
    print('Clean lock')
    cmd = ['az', 'lock', 'list', '--query', '[][id, name, resourceGroup]']
    print(cmd)
    # BUG FIX: removed a stray ``os.popen(cmd)`` call here -- ``os.popen``
    # expects a shell string, not a list, and its result was never read.
    out = subprocess.run(cmd, capture_output=True)
    locks = json.loads(out.stdout)
    print(locks)
    # Only touch resource groups created by CLI test runs (tag product=azurecli).
    cmd = ['az', 'group', 'list', '--tag', 'product=azurecli', '--query', '[].name']
    print(cmd)
    out = subprocess.run(cmd, capture_output=True)
    cli_test_resoure_groups = json.loads(out.stdout)
    print(cli_test_resoure_groups)
    for resource_id, lock_name, rg in tqdm(locks):
        if rg in cli_test_resoure_groups:
            # Group-scope delete; resource-scoped locks survive this pass.
            cmd = f'az lock delete --name {lock_name} --resource-group {rg}'
            print(cmd)
            result = os.popen(cmd).read()
            print(result)
    # Second pass: anything still locked must be a resource-level lock.
    cmd = ['az', 'lock', 'list', '--query', '[][id, name, resourceGroup]']
    print(cmd)
    out = subprocess.run(cmd, capture_output=True)
    locks = json.loads(out.stdout)
    print(locks)
    for resource_id, lock_name, rg in tqdm(locks):
        if rg in cli_test_resoure_groups:
            # Lock id looks like .../providers/<namespace>/<type>/<name>/...;
            # split after 'providers' to recover the resource type and name.
            resource_id = resource_id.split('providers')[1].split('/')
            resource_name = resource_id[3]
            resource_type = "/".join(resource_id[1:3])
            cmd = f'az lock delete --name {lock_name} --resource-group {rg} --resource {resource_name} --resource-type {resource_type}'
            print(cmd)
            result = os.popen(cmd).read()
            print(result)
def old_enough(dates, ttl=None):
    """Return True when the newest timestamp in *dates* exceeds the TTL.

    :param dates: list of ISO-8601 UTC timestamp strings of the form
        ``YYYY-MM-DDTHH:MM:SS.ffffff+00:00`` (newest last); may be empty
    :param ttl: maximum survival time in days; defaults to the module-level
        ``TTL`` constant (backward-compatible generalization)
    :return: bool -- True when the resource should be cleaned up
    """
    if ttl is None:
        ttl = TTL
    if not dates:
        # No creation date recorded -- treat the resource as expired.
        print('Duration: too old')
        return True
    # The '+00:00' suffix is matched literally, so the parsed value is a
    # naive UTC datetime -- consistent with utcnow() below.
    date = datetime.datetime.strptime(dates[-1], '%Y-%m-%dT%H:%M:%S.%f+00:00')
    duration = datetime.datetime.utcnow() - date
    print('Duration: ' + str(duration))
    return duration.days > ttl
def clean_sig():
# Clean up Shared Image Galleries: stop sharing, recreate/delete gallery
# applications so version children are gone, then delete the resource group.
# NOTE(review): indentation was stripped by the page extraction; code kept
# byte-identical, comments only. 'skip_grous' is a pre-existing typo for
# 'skip_groups' (renaming would be a code change).
print('Clean sig')
# Gallery application still has gallery application versions.
skip_grous = ['GALLERYAPP-TEST', 'CLITEST.RGZBEBLKTTJHO7IVJUGYWRRDO434XMUXDOAVDDSBGIMM67257RGJ55TQCILNERPAQWU']
cmd = f'az sig list --query [].id'
print(cmd)
sig_list = json.loads(os.popen(cmd).read())
print(sig_list)
for sig_id in tqdm(sig_list):
# Gallery id segments: [4] = resource group, [8] = gallery name.
rg = sig_id.split('/')[4]
if rg in skip_grous:
continue
gallery_name = sig_id.split('/')[8]
cmd = ['az', 'lock', 'list', '-g', rg]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# skip the resource group when get a lock
# b'[]\r\n'
locks = json.loads(out.stdout)
print(locks)
if locks:
continue
# Stop community/RBAC sharing before the gallery can be deleted.
cmd = f'az sig share reset --ids {sig_id}'
result = os.popen(cmd).read()
print(result)
# Recreating each application appears intended to clear its versions
# before the delete below -- TODO confirm against 'az sig' semantics.
cmd = ['az', 'sig', 'gallery-application', 'create', '--gallery-name', gallery_name, '--name', 'AppName', '-g',
rg, '--os-type', 'windows']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(json.loads(out.stdout))
cmd = ['az', 'sig', 'gallery-application', 'list', '--gallery-name', gallery_name, '-g', rg, '--query',
'[].name']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
app_names = json.loads(out.stdout)
print(app_names)
for name in app_names:
if name != 'AppName':
cmd = ['az', 'sig', 'gallery-application', 'create', '--gallery-name', gallery_name, '--name', name,
'-g', rg, '--os-type', 'windows']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(json.loads(out.stdout))
for name in app_names:
cmd = ['az', 'sig', 'gallery-application', 'delete', '--gallery-name', gallery_name, '--name', name, '-g',
rg, '--yes']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
if out.returncode != 0:
print(out.stderr)
# Second pass: with galleries emptied, delete the whole resource group.
for sig_id in tqdm(sig_list):
rg = sig_id.split('/')[4]
if rg in skip_grous:
continue
cmd = ['az', 'lock', 'list', '-g', rg]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# skip the resource group when get a lock
# b'[]\r\n'
locks = json.loads(out.stdout)
print(locks)
if locks:
continue
cmd = f'az group delete -n {rg} --yes'
result = os.popen(cmd).read()
print(result)
def target_rg(rg, match_prefixes=None):
    """Whether *rg* starts with one of the target prefixes.

    :param rg: resource group name
    :param match_prefixes: iterable of prefixes to test against; defaults to
        the module-level ``prefixes`` list (backward-compatible generalization)
    :return: bool
    """
    if match_prefixes is None:
        match_prefixes = prefixes
    return any(rg.startswith(prefix) for prefix in match_prefixes)
def clean_storage():
# Empty every reachable storage account (delete blobs, then containers)
# and, when all containers were removed, delete the owning resource group.
# NOTE(review): indentation stripped by page extraction; code byte-identical.
print('Clean storage')
skip_grous = []
cmd = ['az', 'storage', 'account', 'list', '--query', '[][name, resourceGroup]']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
accounts = json.loads(out.stdout)
print(accounts)
for account, rg in tqdm(accounts):
delete_group = True
if rg in skip_grous:
continue
cmd = ['az', 'lock', 'list', '-g', rg]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# skip the resource group when get a lock
# b'[]\r\n'
locks = json.loads(out.stdout)
print(locks)
if locks:
continue
cmd = ['az', 'storage', 'account', 'keys', 'list', '--account-name', account, '--query', '[].value']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# Empty stdout means the keys call failed; skip rather than crash.
keys = json.loads(out.stdout) if out.stdout else []
if not keys:
continue
account_key = keys[0]
cmd = ['az', 'storage', 'container', 'list', '--account-name', account, '--account-key', account_key, '--query',
'[].name']
out = subprocess.run(cmd, capture_output=True)
containers = json.loads(out.stdout) if out.stdout else []
print(containers)
# NOTE(review): accounts with zero containers are skipped entirely, so
# their group is never deleted here -- presumably intentional; confirm.
if not containers:
continue
for container in containers:
cmd = f'az storage blob delete-batch --account-name {account} --account-key {account_key} --source {container}'
result = os.popen(cmd).read()
print(result)
cmd = ['az', 'storage', 'container-rm', 'delete', '--storage-account', account, '--name', container, '--yes']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
if out.returncode != 0:
# Container delete failed: keep the group so nothing is lost.
print(out.stderr)
delete_group = False
break
if delete_group:
cmd = f'az group delete -n {rg} --yes'
print(cmd)
result = os.popen(cmd).read()
print(result)
def clean_servicebus():
# Abort in-flight Service Bus migrations in cli_test_sb_migration* groups
# (abort is required before the namespace/group can be deleted), then
# delete the group. NOTE(review): indentation stripped; code byte-identical.
print('Clean servicebus')
skip_grous = []
cmd = ['az', 'group', 'list', '--query', '[].name']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
resource_groups = json.loads(out.stdout)
print(resource_groups)
for resource_group in tqdm(resource_groups):
if resource_group in skip_grous:
continue
if resource_group.startswith('cli_test_sb_migration'):
cmd = ['az', 'servicebus', 'namespace', 'list', '--resource-group', resource_group, '--query',
'[][name, resourceGroup, sku.name]']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
servicebus_list = json.loads(out.stdout)
print(servicebus_list)
for name, rg, sku in tqdm(servicebus_list):
# Only Standard-SKU namespaces can be the source of a migration.
if sku == 'Standard':
cmd = ['az', 'servicebus', 'migration', 'abort', '--resource-group', rg, '--name', name]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
result = out.stdout
print(result)
# Fixed 3-minute wait for the abort to settle before deleting
# the group -- TODO confirm; polling would be more robust.
time.sleep(180)
cmd = f'az group delete -n {rg} --yes'
print(cmd)
result = os.popen(cmd).read()
print(result)
def clean_backup():
# Tear down Recovery Services backup vaults: disable protection on every
# item (deleting backup data), unregister containers, delete policies,
# then delete the resource group. Vault deletion fails while any of these
# remain, hence the ordering. NOTE(review): indentation stripped by page
# extraction; code byte-identical, comments only.
print('Clean backup')
skip_grous = ['myResourceGroup', 'clitest.rgvt3xx3e4uwhbuq3pmtkf72fl674usgxlhezwreh6vdf4jbsvnf4pwohlb7hyyj6qy']
cmd = ['az', 'backup', 'vault', 'list', '--query', '[][name, resourceGroup]']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
backup_vaults = json.loads(out.stdout)
print(backup_vaults)
for vault, resource_group in tqdm(backup_vaults):
cmd = ['az', 'lock', 'list', '-g', resource_group]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# skip the resource group when get a lock
# b'[]\r\n'
locks = json.loads(out.stdout)
print(locks)
if locks:
continue
if resource_group in skip_grous:
continue
cmd = ['az', 'backup', 'item', 'list', '-v', vault, '--resource-group', resource_group, '--query',
'[][properties.friendlyName, properties.backupManagementType, containerName]']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
items = json.loads(out.stdout)
print(items)
for item, item_type, container in items:
if container:
# containerName is semicolon-delimited; the last segment is the
# short container name the CLI expects.
container = container.split(';')[-1]
else:
# No container recorded -- fall back to the item's own name.
container = item
cmd = ['az', 'backup', 'protection', 'disable', '--container-name', container, '--backup-management-type',
item_type, '--delete-backup-data', 'true', '--item-name', item, '--resource-group', resource_group,
'--vault-name', vault, '--yes']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
cmd = ['az', 'backup', 'container', 'show', '--name', container, '--resource-group',
resource_group, '--vault-name', vault, '--backup-management-type', item_type]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
cmd = ['az', 'backup', 'container', 'unregister', '--container-name', container,
'--resource-group', resource_group, '--vault-name', vault, '--backup-management-type', item_type,
'--yes']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
cmd = ['az', 'backup', 'policy', 'list', '--resource-group', resource_group, '--vault-name', vault, '--query',
'[].id']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
policy_ids = json.loads(out.stdout)
if policy_ids:
cmd = ['az', 'backup', 'policy', 'delete', '--ids']
cmd.extend(policy_ids)
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
cmd = f'az group delete -n {resource_group} --yes'
print(cmd)
result = os.popen(cmd).read()
print(result)
def clean_deleted_keyvault():
# Purge soft-deleted Key Vaults / managed HSMs whose scheduled purge date
# has passed, and report how many purges were issued.
# NOTE(review): the comparison is a lexicographic ISO-string compare of
# scheduledPurgeDate against LOCAL datetime.now().isoformat() -- if the
# service returns UTC timestamps this is off by the local UTC offset and
# may include a trailing 'Z'; confirm the service's timestamp format.
cmd = ['az', 'keyvault', 'list-deleted', '--query', '[][name, properties.scheduledPurgeDate, type]']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
deleted_keyvaults = json.loads(out.stdout)
count = 0
for name, scheduledPurgeDate, keyvault_type in tqdm(deleted_keyvaults):
if scheduledPurgeDate <= datetime.datetime.now().isoformat():
# Vaults and managed HSMs take different purge flags.
if keyvault_type == 'Microsoft.KeyVault/deletedVaults':
cmd = ['az', 'keyvault', 'purge', '--name', name, '--no-wait']
elif keyvault_type == 'Microsoft.KeyVault/deletedManagedHSMs':
cmd = ['az', 'keyvault', 'purge', '--hsm-name', name, '--no-wait']
else:
continue
print(cmd)
count += 1
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
# Number of purge commands issued (not necessarily completed: --no-wait).
print(count)
def clean_resource_group():
# Delete every unlocked resource group in the subscription (no prefix
# filter -- this is a full sweep; not called from main() in this view).
skip_grous = []
cmd = ['az', 'group', 'list', '--query', '[].name']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
rgs = json.loads(out.stdout) if out.stdout else []
for rg in rgs:
if rg in skip_grous:
continue
cmd = ['az', 'lock', 'list', '-g', rg]
print(cmd)
out = subprocess.run(cmd, capture_output=True)
# skip the resource group when get a lock
# b'[]\r\n'
locks = json.loads(out.stdout)
print(locks)
if locks:
continue
cmd = ['az', 'group', 'delete', '-n', rg, '--yes']
print(cmd)
out = subprocess.run(cmd, capture_output=True)
print(out.stdout)
if __name__ == '__main__':

Просмотреть файл

@ -6,6 +6,7 @@
"""
Generate index.html of testing results HTML pages.
"""
import datetime
import traceback
import os
import re
@ -17,7 +18,7 @@ import logging
logger = logging.getLogger(__name__)
def generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_TARGET):
def generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_TARGET, ACCOUNT_KEY, USER_REPO_EXT, USER_BRANCH_EXT):
"""
Generate index.html. Upload it to storage account
:param container:
@ -45,18 +46,21 @@ def generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_
data.append({'name': name, 'url': url})
break
logger.warning(data)
html = render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE)
html = render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_REPO_EXT, USER_BRANCH_EXT)
with open('index.html', 'w') as f:
f.write(html)
# Upload to storage account
cmd = 'az storage blob upload -f index.html -c {} -n index.html --account-name clitestresultstac --overwrite'.format(container)
cmd = 'az storage blob upload -f index.html -c {} -n index.html --account-name clitestresultstac --account-key {} --overwrite'.format(container, ACCOUNT_KEY)
logger.warning('Running: ' + cmd)
os.system(cmd)
# Upload to latest container if it is a full live test of official repo dev branch
if USER_REPO == 'https://github.com/Azure/azure-cli.git' and USER_BRANCH == 'dev' and USER_TARGET == '' and USER_LIVE == '--live':
cmd = 'az storage blob upload -f index.html -c latest -n index.html --account-name clitestresultstac --overwrite'
if USER_TARGET.lower() in ['all', ''] \
and USER_REPO == 'https://github.com/Azure/azure-cli.git' \
and USER_REPO_EXT == 'https://github.com/Azure/azure-cli-extensions.git' \
and USER_BRANCH == 'dev' and USER_BRANCH_EXT == 'main' and USER_LIVE == '--live':
cmd = 'az storage blob upload -f index.html -c latest -n index.html --account-name clitestresultstac --account-key {} --overwrite'.format(ACCOUNT_KEY)
logger.warning('Running: ' + cmd)
os.system(cmd)
@ -64,7 +68,16 @@ def generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_
return html
def render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE):
def sort_by_module_name(item):
    """Sort key for test-data rows keyed by module name.

    Non-extension modules sort first (group 0); ``ext-*`` modules sort
    after them (group 1), with the ``ext-`` prefix stripped so extensions
    still order alphabetically among themselves.
    """
    name = item[0]
    if name.startswith("ext-"):
        return (1, name[4:])
    return (0, name)
def render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_REPO_EXT, USER_BRANCH_EXT):
"""
Return a HTML string
:param data:
@ -95,17 +108,19 @@ def render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COM
"""
live = 'True' if USER_LIVE == '--live' else 'False'
date = container.split('-')[0]
date = datetime.date.today()
content += """
<p>
Repository: {}<br>
Branch: {}<br>
Repository of extension: {}<br>
Branch of extension: {}<br>
Commit: {}<br>
Live: {}<br>
Date: {}
</p>
""".format(USER_REPO, USER_BRANCH, COMMIT_ID, live, date)
""".format(USER_REPO, USER_BRANCH, USER_REPO_EXT, USER_BRANCH_EXT, COMMIT_ID, live, date)
content += """
<p>
@ -148,17 +163,19 @@ def render(data, container, container_url, testdata, USER_REPO, USER_BRANCH, COM
</tr>
""".format(testdata.total[1], testdata.total[2], testdata.total[3])
for module, passed, failed, rate in testdata.modules:
sorted_modules = sorted(testdata.modules, key=sort_by_module_name)
for module, passed, failed, rate in sorted_modules:
reports = ''
for x in data:
name = x['name']
url = x['url']
if name.startswith(module + '.'):
display_name = 'report'
if 'parallel' in name:
display_name = 'parallel'
elif 'sequential' in name:
display_name = 'sequential'
# if 'parallel' in name:
# display_name = 'parallel'
# elif 'sequential' in name:
# display_name = 'sequential'
try:
html = requests.get(url).content.__str__()
pattern = re.compile('\\d+ tests ran in')

Просмотреть файл

@ -59,7 +59,7 @@ def analyze_data():
return data, regression_data
def create_issue(data, regression_data):
def create_issue(regression_data):
# Create Github issue
headers = {
'Accept': 'application/vnd.github.v3+json'

Просмотреть файл

@ -2,82 +2,541 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import sys
from azure.kusto.data import KustoConnectionStringBuilder
from azure.kusto.data.data_format import DataFormat
from azure.kusto.ingest import (
IngestionProperties,
QueuedIngestClient,
ReportLevel,
)
from bs4 import BeautifulSoup
import csv
import datetime
import generate_index
import json
import logging
import os
import random
import string
import re
import subprocess
import sys
import test_data
import traceback
# Module logger: DEBUG-level stream handler so all pipeline steps are visible
# in the CI log.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
# NOTE(review): rendered diff -- the old sys.argv-based configuration
# (removed by this commit) and the new environment-based configuration are
# interleaved without +/- markers. The later assignments win at runtime.
SENDGRID_KEY = sys.argv[1]
BUILD_ID = sys.argv[2]
USER_REPO = sys.argv[3]
USER_BRANCH = sys.argv[4]
USER_TARGET = sys.argv[5]
USER_LIVE = sys.argv[6]
ARTIFACT_DIR = sys.argv[7]
REQUESTED_FOR_EMAIL = sys.argv[8]
ACCOUNT_KEY = sys.argv[9]
COMMIT_ID = sys.argv[10]
DB_PWD = sys.argv[11]
DB_USER = sys.argv[12]
DB_HOST = sys.argv[13]
DB_PORT = sys.argv[14]
DB_SCHEME = sys.argv[15]
# --- new configuration: only the commit id stays a CLI argument; the rest
# comes from pipeline environment variables (os.environ.get returns None
# when a variable is unset). ---
COMMIT_ID = sys.argv[1]
ACCOUNT_KEY = os.environ.get('ACCOUNT_KEY')
ARTIFACT_DIR = os.environ.get('ARTIFACTS_DIR')
BUILD_ID = os.environ.get('BUILD_ID')
EMAIL_ADDRESS = os.environ.get('EMAIL_ADDRESS')
EMAIL_KEY = os.environ.get('EMAIL_KEY')
# authenticate with AAD application.
KUSTO_CLIENT_ID = os.environ.get('KUSTO_CLIENT_ID')
KUSTO_CLIENT_SECRET = os.environ.get('KUSTO_CLIENT_SECRET')
KUSTO_CLUSTER = os.environ.get('KUSTO_CLUSTER')
KUSTO_DATABASE = os.environ.get('KUSTO_DATABASE')
KUSTO_TABLE = os.environ.get('KUSTO_TABLE')
# get tenant id from https://docs.microsoft.com/en-us/onedrive/find-your-office-365-tenant-id
KUSTO_TENANT_ID = os.environ.get('KUSTO_TENANT_ID')
PYTHON_VERSION = os.environ.get('PYTHON_VERSION')
USER_BRANCH = os.environ.get('USER_BRANCH')
USER_BRANCH_EXT = os.environ.get('USER_BRANCH_EXT')
USER_LIVE = os.environ.get('USER_LIVE')
USER_REPO = os.environ.get('USER_REPO')
USER_REPO_EXT = os.environ.get('USER_REPO_EXT')
USER_TARGET = os.environ.get('USER_TARGET')
resource_html = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8"/>
<title>Remaining resources.html</title>
<style>body {
font-family: Helvetica, Arial, sans-serif;
font-size: 12px;
/* do not increase min-width as some may use split screens */
min-width: 800px;
color: #999;
}
h1 {
font-size: 24px;
color: black;
}
table {
border-collapse: collapse;
}
/******************************
* RESULTS TABLE
*
* 1. Table Layout
* 2. Sorting items
*
******************************/
/*------------------
* 1. Table Layout
*------------------*/
#results-table {
border: 1px solid #e6e6e6;
color: #999;
font-size: 12px;
width: 100%;
}
#results-table th,
#results-table td {
padding: 5px;
border: 1px solid #E6E6E6;
text-align: left;
}
#results-table th {
font-weight: bold;
}
/*------------------
* 2. Sorting items
*------------------*/
.sortable {
cursor: pointer;
}
.sort-icon {
font-size: 0px;
float: left;
margin-right: 5px;
margin-top: 5px;
/*triangle*/
width: 0;
height: 0;
border-left: 8px solid transparent;
border-right: 8px solid transparent;
}
.inactive .sort-icon {
/*finish triangle*/
border-top: 8px solid #E6E6E6;
}
.asc.active .sort-icon {
/*finish triangle*/
border-bottom: 8px solid #999;
}
.desc.active .sort-icon {
/*finish triangle*/
border-top: 8px solid #999;
}
</style></head>
<body onLoad="init()">
<script>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
function toArray(iter) {
if (iter === null) {
return null;
}
return Array.prototype.slice.call(iter);
}
function find(selector, elem) { // eslint-disable-line no-redeclare
if (!elem) {
elem = document;
}
return elem.querySelector(selector);
}
function findAll(selector, elem) {
if (!elem) {
elem = document;
}
return toArray(elem.querySelectorAll(selector));
}
function sortColumn(elem) {
toggleSortStates(elem);
const colIndex = toArray(elem.parentNode.childNodes).indexOf(elem);
let key;
key = keyAlpha;
sortTable(elem, key(colIndex));
}
function showFilters() {
let visibleString = getQueryParameter('visible') || 'all';
visibleString = visibleString.toLowerCase();
const checkedItems = visibleString.split(',');
const filterItems = document.getElementsByClassName('filter');
for (let i = 0; i < filterItems.length; i++) {
filterItems[i].hidden = false;
if (visibleString != 'all') {
filterItems[i].checked = checkedItems.includes(filterItems[i].getAttribute('data-test-result'));
}
}
}
function getQueryParameter(name) {
const match = RegExp('[?&]' + name + '=([^&]*)').exec(window.location.search);
return match && decodeURIComponent(match[1].replace(/\+/g, ' '));
}
function init () { // eslint-disable-line no-unused-vars
resetSortHeaders();
showFilters();
sortColumn(find('.initial-sort'));
findAll('.sortable').forEach(function(elem) {
elem.addEventListener('click',
function() {
sortColumn(elem);
}, false);
});
}
function sortTable(clicked, keyFunc) {
const rows = findAll('.results-table-row');
const reversed = !clicked.classList.contains('asc');
const sortedRows = sort(rows, keyFunc, reversed);
/* Whole table is removed here because browsers acts much slower
* when appending existing elements.
*/
const thead = document.getElementById('results-table-head');
document.getElementById('results-table').remove();
const parent = document.createElement('table');
parent.id = 'results-table';
parent.appendChild(thead);
sortedRows.forEach(function(elem) {
parent.appendChild(elem);
});
document.getElementsByTagName('BODY')[0].appendChild(parent);
}
function sort(items, keyFunc, reversed) {
const sortArray = items.map(function(item, i) {
return [keyFunc(item), i];
});
sortArray.sort(function(a, b) {
const keyA = a[0];
const keyB = b[0];
if (keyA == keyB) return 0;
if (reversed) {
return keyA < keyB ? 1 : -1;
} else {
return keyA > keyB ? 1 : -1;
}
});
return sortArray.map(function(item) {
const index = item[1];
return items[index];
});
}
function keyAlpha(colIndex) {
return function(elem) {
return elem.childNodes[1].childNodes[colIndex].firstChild.data.toLowerCase();
};
}
function resetSortHeaders() {
findAll('.sort-icon').forEach(function(elem) {
elem.parentNode.removeChild(elem);
});
findAll('.sortable').forEach(function(elem) {
const icon = document.createElement('div');
icon.className = 'sort-icon';
icon.textContent = 'vvv';
elem.insertBefore(icon, elem.firstChild);
elem.classList.remove('desc', 'active');
elem.classList.add('asc', 'inactive');
});
}
function toggleSortStates(elem) {
//if active, toggle between asc and desc
if (elem.classList.contains('active')) {
elem.classList.toggle('asc');
elem.classList.toggle('desc');
}
//if inactive, reset all other functions and add ascending active
if (elem.classList.contains('inactive')) {
resetSortHeaders();
elem.classList.remove('inactive');
elem.classList.add('active');
}
}
</script>
<h1>Resources to clean up</h1>
<table id="results-table">
<thead id="results-table-head">
<tr>
<th class="sortable initial-sort" col="module">Module</th>
<th class="sortable" col="test-case">Test Case</th>
<th class="sortable" col="date">Date</th>
<th class="sortable" col="resource-group">Resource Group</th>
</tr>
</table>
</body>
</html>
"""
# NOTE(review): rendered diff -- old (logger.warning / argv-era) and new
# (logger.info / env-era) bodies are interleaved without +/- markers, which
# is why the upload block and the generate_index.generate() call each appear
# twice. Code kept byte-identical; comments only.
def main():
# Pipeline: log config -> collect stats -> summarize/Kusto -> upload ->
# index.html + email -> report leftover resources.
logger.warning('Enter main()')
logger.info('Enter main()')
logger.warning(sys.argv)
logger.warning(SENDGRID_KEY)
logger.warning(BUILD_ID)
logger.warning(USER_REPO)
logger.warning(USER_BRANCH)
logger.warning(USER_TARGET)
logger.warning(USER_LIVE)
logger.warning(ARTIFACT_DIR)
logger.warning(REQUESTED_FOR_EMAIL)
logger.warning(ACCOUNT_KEY)
logger.warning(COMMIT_ID)
logger.warning(DB_PWD)
# Upload results to storage account, container
container = ''
try:
logger.warning('Uploading test results to storage account...')
container = get_container_name()
upload_files(container)
except Exception:
# Best-effort: upload failure must not kill the rest of the pipeline.
logger.exception(traceback.format_exc())
logger.info(BUILD_ID)
logger.info(USER_REPO)
logger.info(USER_BRANCH)
logger.info(USER_TARGET)
logger.info(USER_LIVE)
logger.info(ARTIFACT_DIR)
logger.info(EMAIL_ADDRESS)
logger.info(COMMIT_ID)
# Collect statistics
testdata = test_data.TestData(ARTIFACT_DIR)
testdata.collect()
# Summary data and send to kusto db
summary_data(testdata)
# Upload results to storage account, container
container = ''
try:
logger.info('Uploading test results to storage account...')
container = get_container_name()
upload_files(container)
except Exception:
logger.exception(traceback.format_exc())
# Generate index.html, send email
try:
# Generate index.html
container_url = 'https://clitestresultstac.blob.core.windows.net/' + container
html_content = generate_index.generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_TARGET)
html_content = generate_index.generate(container, container_url, testdata, USER_REPO, USER_BRANCH, COMMIT_ID, USER_LIVE, USER_TARGET, ACCOUNT_KEY, USER_REPO_EXT, USER_BRANCH_EXT)
# Send email
send_email(html_content)
except Exception:
logger.exception(traceback.format_exc())
# Write database
# try:
# write_db(container, testdata)
# except Exception:
# logger.exception(traceback.format_exc())
get_remaining_tests()
logger.info('Exit main()')
logger.warning('Exit main()')
def get_remaining_tests():
# Build resource.html listing resource groups left behind after the live
# run (groups tagged with 'module'), then upload it to the results storage
# container named after the build id. Uses the resource_html template and
# BeautifulSoup to append one table row per leftover group.
# get residual resources after live test finished
logger.info('Enter get_remaining_tests()')
cmd = ['az', 'group', 'list', '--tag', 'module', '--query', '[][name, tags]']
logger.info(cmd)
out = subprocess.run(cmd, capture_output=True)
remaing_tests = json.loads(out.stdout) if out.stdout else []
if remaing_tests:
# sorted remaing tests by module name and test name
sorted_tests = sorted(remaing_tests, key=lambda x: (x[1]['module'], x[1]['test']))
soup = BeautifulSoup(resource_html, 'html.parser')
for test in sorted_tests:
# Each entry is [group_name, tags]; tags carry module/test/date.
module = test[1]['module']
test_name = test[1]['test']
date = test[1]['date']
group = test[0]
tbody = soup.new_tag('tbody', **{'class': 'results-table-row'})
tr = soup.new_tag('tr')
td_module = soup.new_tag('td', **{'class': 'col-module'})
td_module.string = module
tr.append(td_module)
td_test = soup.new_tag('td', **{'class': 'col-test-case'})
td_test.string = test_name
tr.append(td_test)
td_date = soup.new_tag('td', **{'class': 'col-date'})
td_date.string = date
tr.append(td_date)
td_group = soup.new_tag('td', **{'class': 'col-resource-group'})
td_group.string = group
tr.append(td_group)
tbody.append(tr)
soup.table.append(tbody)
with open('resource.html', 'w') as f:
f.write(str(soup))
logger.info('resource.html: ' + str(soup))
cmd = 'az storage blob upload -f resource.html -c {} -n resource.html --account-name clitestresultstac --account-key {}'.format(BUILD_ID, ACCOUNT_KEY)
logger.info('Running: ' + cmd)
os.system(cmd)
def summary_data(testdata):
# Merge per-platform pytest HTML reports into one <module>.report.html per
# module (adding a Platform column and combining durations), collect CSV
# rows via html_to_csv(), optionally ship them to Kusto, then delete the
# per-platform report files. NOTE(review): indentation stripped by page
# extraction; code kept byte-identical, comments only.
# summary data by module and platform
logger.info('Enter summary_data_by_module()')
modules = [module[0].split('.')[0] for module in testdata.modules]
data = []
for idx, module in enumerate(modules):
total_test = testdata.modules[idx][1] + testdata.modules[idx][2]
passed = testdata.modules[idx][1]
failed = testdata.modules[idx][2]
html_name = '.'.join([module, 'report.html'])
src_soup = ''
for root, dirs, files in os.walk(ARTIFACT_DIR):
# 'First' marks whether we are still looking for the module's first
# report (it becomes the merge target for the others).
First = True
dst_html = os.path.join(root, html_name)
for file in files:
if file.startswith(module) and file.endswith('html') and First:
First = False
# File name convention: <module>.<platform>....html
platform = file.split('.')[1]
first = os.path.join(root, file)
try:
data.extend(html_to_csv(first, module, platform))
except Exception as e:
# Unparseable report: reset First so the next file can
# become the merge target instead.
logger.error(f'Error load {first}')
First = True
continue
with open(first, 'r') as f:
src_html = f.read()
src_soup = BeautifulSoup(src_html, 'html.parser')
# Insert a Platform column into the header...
th = src_soup.find('thead', id='results-table-head')
tr = th.find('tr')
new_th = src_soup.new_tag('th', **{'class': 'sortable', 'col': 'platform'})
new_th.string = 'Platform'
tr.insert(2, new_th)
# ...and into every result row.
tbodys = src_soup.findAll('tbody')
for tbody in tbodys:
tr = tbody.find('tr')
new_col = src_soup.new_tag('td', **{'class': 'col-platform'})
new_col.string = platform
tr.insert(2, new_col)
src_soup.find('title').string = f'{module}.html'
src_soup.find('h1').string = f'{module}.html'
# Drop the per-platform row from the environment table: the
# merged report covers several platforms.
env = src_soup.find('table', id='environment')
if env:
env_trs = env.findAll('tr')
for tr in env_trs:
if 'Platform' in tr.text:
tr.decompose()
# Re-enable the result-filter checkboxes pytest-html disables.
inputs = src_soup.findAll('input')
for i in inputs:
if 'disabled' in i.attrs:
del i['disabled']
src_soup.find('span', {'class': 'passed'}).string = f'{passed} passed'
src_soup.find('span', {'class': 'failed'}).string = f'{failed} failed'
# src_soup.find('span', {'class': 'skipped'}).string = f'{skiped} skipped'
elif file.startswith(module) and file.endswith('html'):
# Subsequent platform reports: append their rows (with a
# Platform cell) into the merge target built above.
platform = file.split('.')[1]
other = os.path.join(root, file)
try:
data.extend(html_to_csv(other, module, platform))
except Exception as e:
logger.error(f'Error load {other}')
continue
with open(other, 'r') as f:
other_html = f.read()
other_soup = BeautifulSoup(other_html, 'html.parser')
tbodys = other_soup.findAll('tbody')
for tbody in tbodys:
tr = tbody.find('tr')
new_col = src_soup.new_tag('td', **{'class': 'col-platform'})
new_col.string = platform
tr.insert(2, new_col)
table = src_soup.find('table', id='results-table')
for tbody in tbodys:
table.append(tbody)
# Sum the 'N tests ran in X seconds' durations of both reports.
p1 = src_soup.find('p', string=re.compile('.*tests ran in.*'))
duration = p1.string.split(' ')[-3]
p2 = other_soup.find('p', string=re.compile('.*tests ran in.*'))
duration2 = p2.string.split(' ')[-3]
duration = float(duration) + float(duration2)
p1.string = f'{total_test} tests ran in {duration} seconds. '
with open(dst_html, 'w') as f:
f.write(str(src_soup))
# Only ship to Kusto for full live runs of the official repos/branches.
# send to kusto db
if USER_TARGET.lower() in ['all', ''] \
and USER_REPO == 'https://github.com/Azure/azure-cli.git' \
and USER_REPO_EXT == 'https://github.com/Azure/azure-cli-extensions.git' \
and USER_BRANCH == 'dev' and USER_BRANCH_EXT == 'main' \
and USER_LIVE == '--live' and data:
send_to_kusto(data)
# Remove the per-platform reports; only merged <module>.report.html
# files (3 dot-separated parts) are kept.
for root, dirs, files in os.walk(ARTIFACT_DIR):
for file in files:
if len(file.split('.')) > 3 and file.endswith('html'):
os.remove(os.path.join(root, file))
def html_to_csv(html_file, module, platform):
    """
    Parse a pytest-html report and flatten each test result into a row for Kusto ingestion.

    :param html_file: path to the pytest-html report file
    :param module: name of the CLI module the report belongs to
    :param platform: platform label parsed from the report file name
    :return: list of rows, one per test:
             [Source, BuildId, Platform, PythonVersion, Module, Name, Description,
              StartDateTime, EndDateTime, Duration, Status, Details, ExtendedProperties]
    """
    data = []
    if not os.path.exists(html_file):
        return data
    with open(html_file) as file:
        bs = BeautifulSoup(file, "html.parser")
    results = bs.find(id="results-table")
    source = 'LiveTest'
    build_id = BUILD_ID
    description = ''
    extended_properties = ''
    for result in results.find_all('tbody'):
        name = result.find('td', {'class': 'col-name'}).text.split('::')[-1]
        duration = result.find('td', {'class': 'col-duration'}).text
        status = result.find('td', {'class': 'col-result'}).text
        details = ''
        if status == 'Failed':
            # The failure log is a mix of bare text nodes and <br>/<span> tags;
            # rebuild the plain-text log from them.
            contents = result.find('td', {'class': 'extra'}).find('div', {'class': 'log'}).contents
            for content in contents:
                if content.name == 'br':
                    details += '\n'
                elif not content.name:
                    details += content
                elif content.name != 'span':
                    # Surface unexpected tags so the parser can be extended later.
                    logger.info(content.name)
        # pytest-html only records the duration, so the start time is derived from
        # the current time.  Capture "now" once so start/end stay consistent even
        # if the calls would otherwise straddle a second boundary.
        end_time = datetime.datetime.now()
        start_time = end_time - datetime.timedelta(seconds=int(float(duration)))
        data.append(
            [source, build_id, platform, PYTHON_VERSION, module, name, description,
             start_time.strftime("%Y-%m-%d %H:%M:%S"), end_time.strftime("%Y-%m-%d %H:%M:%S"),
             duration, status, details, extended_properties])
    return data
def send_to_kusto(data):
    """
    Write live-test result rows to a CSV file under ARTIFACT_DIR and ingest it
    into the Kusto database via the queued ingest client.

    :param data: list of result rows as produced by html_to_csv
    """
    logger.info('Start send csv data to kusto db')
    csv_path = f'{ARTIFACT_DIR}/livetest.csv'
    # newline='' is required by the csv module to avoid blank lines on Windows.
    with open(csv_path, mode='w', newline='') as file:
        writer = csv.writer(file)
        writer.writerows(data)
    logger.info('Finish generate csv file for live test.')
    # The authentication method will be taken from the chosen KustoConnectionStringBuilder.
    kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
        KUSTO_CLUSTER, KUSTO_CLIENT_ID, KUSTO_CLIENT_SECRET, KUSTO_TENANT_ID)
    client = QueuedIngestClient(kcsb)
    # There are a lot of useful properties, make sure to go over docs and check them out.
    ingestion_props = IngestionProperties(
        database=KUSTO_DATABASE,
        table=KUSTO_TABLE,
        data_format=DataFormat.CSV,
        report_level=ReportLevel.FailuresAndSuccesses
    )
    # Ingest from file; the result carries useful info such as source_id and blob_url.
    result = client.ingest_from_file(csv_path, ingestion_properties=ingestion_props)
    print(repr(result))
    logger.info('Finish send live test csv data to kusto db.')
def get_container_name():
@ -85,11 +544,9 @@ def get_container_name():
Generate container name in storage account. It is also an identifier of the pipeline run.
:return:
"""
logger.warning('Enter get_container_name()')
time = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
random_id = ''.join(random.choice(string.digits) for _ in range(6))
name = time + '-' + random_id
logger.warning('Exit get_container_name()')
logger.info('Enter get_container_name()')
name = BUILD_ID
logger.info('Exit get_container_name()')
return name
@ -99,7 +556,7 @@ def upload_files(container):
:param container:
:return:
"""
logger.warning('Enter upload_files()')
logger.info('Enter upload_files()')
# Create container
cmd = 'az storage container create -n {} --account-name clitestresultstac --account-key {} --public-access container'.format(container, ACCOUNT_KEY)
@ -110,143 +567,17 @@ def upload_files(container):
for name in files:
if name.endswith('html') or name.endswith('json'):
fullpath = os.path.join(root, name)
cmd = 'az storage blob upload -f {} -c {} -n {} --account-name clitestresultstac'
cmd = cmd.format(fullpath, container, name)
logger.warning('Running: ' + cmd)
cmd = 'az storage blob upload -f {} -c {} -n {} --account-name clitestresultstac --account-key {}'.format(fullpath, container, name, ACCOUNT_KEY)
os.system(cmd)
logger.warning('Exit upload_files()')
def write_db(container, testdata):
    """
    Insert test results into the MySQL database: the run summary into t1 and
    per-module results into t2 (linked to t1 via ref_id).

    Sql statements to create table:
    USE clidb;
    CREATE TABLE `t1` (
      `id` int(11) NOT NULL AUTO_INCREMENT,
      `repr` varchar(30) DEFAULT NULL COMMENT 'date_time_random6digits',
      `repo` varchar(200) DEFAULT NULL COMMENT 'Repo URL',
      `branch` varchar(200) DEFAULT NULL COMMENT 'Branch name',
      `commit` varchar(50) DEFAULT NULL COMMENT 'Commit ID',
      `target` varchar(2000) DEFAULT NULL COMMENT 'Target modules to test. Splited by space, Empty string represents all modules',
      `live` tinyint(1) DEFAULT NULL COMMENT 'Live run or not',
      `user` varchar(50) DEFAULT NULL COMMENT 'User (email address) who triggers the run',
      `pass` int(11) DEFAULT NULL COMMENT 'Number of passed tests',
      `fail` int(11) DEFAULT NULL COMMENT 'Number of failed tests',
      `rate` varchar(50) DEFAULT NULL COMMENT 'Pass rate',
      `detail` varchar(10000) DEFAULT NULL COMMENT 'Detail',
      `container` varchar(200) DEFAULT NULL COMMENT 'Container URL',
      `date` varchar(10) DEFAULT NULL COMMENT 'Date. E.g. 20200801',
      `time` varchar(10) DEFAULT NULL COMMENT 'Time. E.g. 183000',
      PRIMARY KEY (`id`),
      UNIQUE KEY `repr` (`repr`)
    );

    :param container: run identifier ('date-time-random6digits'), unique per pipeline run
    :param testdata: summary object exposing `total` (overall counts) and
                     `modules` (per-module (name, passed, failed, rate) tuples)
    """
    logger.warning('Enter write_db()')
    logger.warning('container {}'.format(container))
    logger.warning('testdata {}'.format(testdata))
    import mysql.connector
    logger.warning('Connect DB...')
    # Connect
    cnx = mysql.connector.connect(user=DB_USER,
                                  password=DB_PWD,
                                  host=DB_HOST,
                                  port=DB_PORT,
                                  database=DB_SCHEME,
                                  connection_timeout=30)
    logger.warning('Connect DB Success')
    cursor = cnx.cursor()
    try:
        sql = 'INSERT INTO t1 (repr, repo, branch, commit, target, live, user, pass, fail, rate, detail, container, date, time) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);'
        logger.warning(sql)
        # 'run_repr' instead of 'repr' to avoid shadowing the builtin.
        run_repr = container
        live = 1 if USER_LIVE == '--live' else 0
        container_url = 'https://clitestresultstac.blob.core.windows.net/{}/index.html'.format(container)
        # Container name is '<date>-<time>-<random>'; reuse its date/time parts.
        terms = container.split('-')
        date = terms[0]
        time = terms[1]
        data = (run_repr, USER_REPO, USER_BRANCH, COMMIT_ID, USER_TARGET, live,
                REQUESTED_FOR_EMAIL, testdata.total[1], testdata.total[2], testdata.total[3],
                str(testdata.modules), container_url, date, time)
        logger.warning(data)
        cursor.execute(sql, data)
        # Make sure data is committed to the database
        cnx.commit()
        # Look up the auto-increment id of the row just inserted so the
        # per-module rows in t2 can reference it.
        cursor.execute('SELECT id FROM t1 WHERE repr = %s', (run_repr,))
        id0 = None
        for value in cursor:
            id0 = value[0]
        if id0:
            for module, passed, failed, rate in testdata.modules:
                sql = 'INSERT INTO t2 (module, pass, fail, rate, ref_id) VALUES (%s, %s, %s, %s, %s)'
                data = (module, passed, failed, rate, id0)
                logger.warning(sql)
                logger.warning(data)
                cursor.execute(sql, data)
            cnx.commit()
    finally:
        # Always release the cursor and connection, even if an insert fails;
        # the original code leaked both on any exception.
        cursor.close()
        cnx.close()
    logger.warning('Exit write_db()')
# def send_email(html_content):
# logger.warning('Enter send_email()')
# from sendgrid import SendGridAPIClient
# logger.warning('Sending email...')
#
# data = {
# "personalizations": [
# {
# "to": [],
# "subject": "Test results of Azure CLI"
# }
# ],
# "from": {
# "email": "azclibot@microsoft.com"
# },
# "content": [
# {
# "type": "text/html",
# "value": html_content
# }
# ]
# }
#
# if REQUESTED_FOR_EMAIL != '':
# data['personalizations'][0]['to'].append({'email': REQUESTED_FOR_EMAIL})
# if USER_TARGET == '' and USER_REPO == 'https://github.com/Azure/azure-cli.git' and USER_BRANCH == 'dev' and USER_LIVE == '--live' and REQUESTED_FOR_EMAIL == '':
# data['personalizations'][0]['to'].append({'email': 'AzPyCLI@microsoft.com'})
# data['personalizations'][0]['to'].append({'email': 'antcliTest@microsoft.com'})
# logger.warning(data)
#
# sendgrid_key = sys.argv[1]
# sg = SendGridAPIClient(sendgrid_key)
# response = sg.send(data)
# logger.warning(response.status_code)
# logger.warning(response.body)
# logger.warning(response.headers)
# logger.warning('Exit send_email()')
logger.info('Exit upload_files()')
def send_email(html_content):
logger.warning('Sending email...')
logger.info('Sending email...')
from azure.communication.email import EmailClient
client = EmailClient.from_connection_string(SENDGRID_KEY);
client = EmailClient.from_connection_string(EMAIL_KEY);
content = {
"subject": "Test results of Azure CLI",
"html": html_content,
@ -254,16 +585,19 @@ def send_email(html_content):
recipients = ''
if REQUESTED_FOR_EMAIL != '':
if EMAIL_ADDRESS != '':
recipients = {
"to": [
{
"address": REQUESTED_FOR_EMAIL
"address": EMAIL_ADDRESS
},
]
}
# TODO: USER_TARGET == 'all'
elif USER_TARGET == '' and USER_REPO == 'https://github.com/Azure/azure-cli.git' and USER_BRANCH == 'dev' and USER_LIVE == '--live' and REQUESTED_FOR_EMAIL == '':
elif USER_TARGET.lower() in ['all', ''] \
and USER_REPO == 'https://github.com/Azure/azure-cli.git' \
and USER_REPO_EXT == 'https://github.com/Azure/azure-cli-extensions.git' \
and USER_BRANCH == 'dev' and USER_BRANCH_EXT == 'main' \
and USER_LIVE == '--live' and EMAIL_ADDRESS == '':
recipients = {
"to": [
{
@ -283,102 +617,9 @@ def send_email(html_content):
}
client.begin_send(message)
logger.warning('Finish sending email')
logger.info('Finish sending email')
else:
logger.warning('No recipients, skip sending email')
def get_content(container, testdata):
    """
    Compose the HTML body of the test-result email.

    :param container: storage container name holding the uploaded reports; '' when
                      nothing was uploaded (the location section is then omitted)
    :param testdata: summary object exposing `total` and `modules`
    :return: the full HTML document as a string
    """
    logger.warning('Enter get_content()')
    # Collect fragments in a list and join once at the end.
    parts = ["""
    <!DOCTYPE html>
    <html>
    <head>
    <style>
    table, th, td {
      border: 1px solid black;
      border-collapse: collapse;
    }
    </style>
    </head>
    <body>
    """]
    build_link = 'https://dev.azure.com/azure-sdk/internal/_build/results?buildId={}&view=ms.vss-test-web.build-test-results-tab'.format(BUILD_ID)
    parts.append("""
    <p>Hi Azure CLI team,</p>
    <p>[Please move this mail to normal folder if it is in junk box, otherwise, the HTML and CSS content may not be displayed correctly]</p>
    <p>
    Here are test results of Azure CLI.<br>
    Repository: {}<br>
    Branch: {}<br>
    Link: {}
    </p>
    """.format(USER_REPO, USER_BRANCH, build_link))
    parts.append("""
    <p>
    <b>User Manual of Live Test Pipeline</b><br>
    <a href=https://microsoft-my.sharepoint.com/:w:/p/fey/EZGC9LwrN3RAscVS5ylG4HMBX9h7W0ZSA7CDrhXN5Lvx6g?e=V8HUmd>Word</a>
    <a href=https://microsoft.sharepoint.com/teams/IoTToolingTeam/_layouts/OneNote.aspx?id=%2Fteams%2FIoTToolingTeam%2FShared%20Documents%2FAzure%20Management%20Experience%2FAzure%20Management%20Experience&wd=target%28AZ%20CLI%2FKnowledge%20base.one%7C18BC64EE-9328-497D-804E-6436006CA9A5%2FUser%20Manual%20of%20Live%20Test%20Pipeline%7C243EFA3E-FC7F-4612-9DA5-8E6BB2A11BD3%2F%29>OneNote</a>
    </p>
    """)
    if container != '':
        parts.append("""
    <p>
    <b>Test results location</b><br>
    Storage account: /subscriptions/0b1f6471-1bf0-4dda-aec3-cb9272f09590/resourceGroups/clitestresult/providers/Microsoft.Storage/storageAccounts/clitestresultstac <br>
    Container: {}
    </p>
    """.format(container))
    # Summary table: one row per module, plus a totals row.
    parts.append("""
    <p><b>Test results summary</b></p>
    <table>
    <tr>
    <th>Module</th>
    <th>Passed</th>
    <th>Failed</th>
    <th>Pass rate</th>
    </tr>
    """)
    for module_name, passed, failed, rate in testdata.modules:
        parts.append("""
    <tr>
    <td>{}</td>
    <td>{}</td>
    <td>{}</td>
    <td>{}</td>
    </tr>
    """.format(module_name, passed, failed, rate))
    parts.append("""
    <tr>
    <td>Total</td>
    <td>{}</td>
    <td>{}</td>
    <td>{}</td>
    </tr>
    </table>
    """.format(testdata.total[1], testdata.total[2], testdata.total[3]))
    parts.append("""
    </body>
    </html>
    """)
    content = ''.join(parts)
    logger.warning(content)
    logger.warning('Exit get_content()')
    return content
logger.info('No recipients, skip sending email')
if __name__ == '__main__':