Merge branch 'dev' into hallvictoria/aio-support
This commit is contained in:
Commit
1edd5b7a08
@@ -1,50 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.

# E2E Integration Test Pipeline

pr: none
trigger: none

pool:
  vmImage: 'ubuntu-latest'

variables:
  DOTNET_VERSION: '3.1.x'
  DOTNET_VERSION_6: '6.x'
  PYTHON_VERSION: '3.9'

steps:
- task: UsePythonVersion@0
  displayName: 'Install Python'
  inputs:
    versionSpec: $(PYTHON_VERSION)
    addToPath: true
- task: UseDotNet@2
  displayName: 'Install DotNet 3'
  inputs:
    packageType: 'sdk'
    version: $(DOTNET_VERSION)
- task: UseDotNet@2
  displayName: 'Install DotNet 6'
  inputs:
    packageType: 'sdk'
    version: $(DOTNET_VERSION_6)
- pwsh: '$(Build.SourcesDirectory)/.ci/e2e_integration_test/start-e2e.ps1'
  env:
    AzureWebJobsStorage: $(AzureWebJobsStorage)
    AzureWebJobsCosmosDBConnectionString: $(AzureWebJobsCosmosDBConnectionString)
    AzureWebJobsEventHubConnectionString: $(AzureWebJobsEventHubConnectionString)
    AzureWebJobsServiceBusConnectionString: $(AzureWebJobsServiceBusConnectionString)
    AzureWebJobsSqlConnectionString: $(AzureWebJobsSqlConnectionString)
    AzureWebJobsEventGridTopicUri: $(AzureWebJobsEventGridTopicUri)
    AzureWebJobsEventGridConnectionKey: $(AzureWebJobsEventGridConnectionKey)
    PythonVersion: $(PYTHON_VERSION)
  displayName: 'Running Python Language Worker E2E Tests'
- task: PublishTestResults@2
  displayName: 'Publish Python Worker E2E Test Result'
  condition: succeededOrFailed()
  inputs:
    testResultsFiles: 'e2e-integration-test-report.xml'
    testRunTitle: 'Publish Python Worker E2E Test Result'
    failTaskOnFailedTests: true
@@ -1,17 +0,0 @@
#!/bin/bash

set -e -x

python -m pip install --upgrade pip

# Install the Azure Functions Python Worker (editable, with dev extras), preferring packages from test.pypi.org
python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U -e .[dev]

# Install the latest Azure Functions Python Library from test.pypi.org
python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre

# Download Azure Functions Host
python setup.py webhost

# Set up WebJobs Extensions
python setup.py extension
@@ -1,12 +0,0 @@
#!/bin/bash

set -e -x
export AzureWebJobsStorage=$LINUXSTORAGECONNECTIONSTRING
export AzureWebJobsCosmosDBConnectionString=$LINUXCOSMOSDBCONNECTIONSTRING
export AzureWebJobsEventHubConnectionString=$LINUXEVENTHUBCONNECTIONSTRING
export AzureWebJobsServiceBusConnectionString=$LINUXSERVICEBUSCONNECTIONSTRING
export AzureWebJobsSqlConnectionString=$LINUXSQLCONNECTIONSTRING
export AzureWebJobsEventGridTopicUri=$LINUXEVENTGRIDTOPICURI
export AzureWebJobsEventGridConnectionKey=$LINUXEVENTGRIDTOPICCONNECTIONKEY

pytest --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend
@@ -1,6 +0,0 @@
#!/bin/bash

wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb \
    && sudo dpkg -i packages-microsoft-prod.deb \
    && sudo apt-get update \
    && sudo apt-get install -y azure-functions-core-tools
@@ -1,4 +0,0 @@
#!/bin/bash

set -e -x
pytest --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests
@@ -1,26 +0,0 @@
ARG PYTHON_VERSION=3.11

FROM mcr.microsoft.com/azure-functions/python:4-python$PYTHON_VERSION
# Re-declare so PYTHON_VERSION stays available after FROM
ARG PYTHON_VERSION=3.11

# Mount the local machine's azure-functions-python-worker and azure-functions-python-library here
RUN rm -rf /azure-functions-host/workers/python/${PYTHON_VERSION}/LINUX/X64/azure_functions_worker

# Use the following command to run the docker image with a customizable worker and library
VOLUME ["/azure-functions-host/workers/python/${PYTHON_VERSION}/LINUX/X64/azure_functions_worker"]

ENV AzureWebJobsScriptRoot=/home/site/wwwroot \
    AzureFunctionsJobHost__Logging__Console__IsEnabled=true \
    FUNCTIONS_WORKER_PROCESS_COUNT=1 \
    AZURE_FUNCTIONS_ENVIRONMENT=Development \
    FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED=1

RUN apt-get --quiet update && \
    apt-get install --quiet -y git procps && \
    # procps is required for displaying worker and profiling process info
    cd /home && \
    git clone https://github.com/vrdmr/AzFunctionsPythonPerformance.git && \
    mkdir -p /home/site/wwwroot/ && \
    cp -r AzFunctionsPythonPerformance/* /home/site/wwwroot/ && \
    pip install -q -r /home/site/wwwroot/requirements.txt

CMD [ "/azure-functions-host/Microsoft.Azure.WebJobs.Script.WebHost" ]
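A minimal sketch of how this perf image might be exercised locally; the image tag and the mounted host path are assumptions, not part of the commit:

    # Build the image, then run it with a locally built worker mounted over the baked-in one.
    docker build --build-arg PYTHON_VERSION=3.11 -t python-worker-perf .
    docker run -p 8080:80 \
        -v "$(pwd)/azure_functions_worker:/azure-functions-host/workers/python/3.11/LINUX/X64/azure_functions_worker" \
        python-worker-perf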
@@ -1,211 +0,0 @@
#
# NOTE: THIS DOCKERFILE IS SPECIALLY MADE FOR PYTHON 3.9 WORKLOADS.
#

# Build the runtime from source
ARG HOST_VERSION=3.0.14492
FROM mcr.microsoft.com/dotnet/core/sdk:3.1 AS runtime-image
ARG HOST_VERSION

ENV PublishWithAspNetCoreTargetManifest=false

RUN BUILD_NUMBER=$(echo ${HOST_VERSION} | cut -d'.' -f 3) && \
    git clone --branch v${HOST_VERSION} https://github.com/Azure/azure-functions-host /src/azure-functions-host && \
    cd /src/azure-functions-host && \
    HOST_COMMIT=$(git rev-list -1 HEAD) && \
    dotnet publish -v q /p:BuildNumber=$BUILD_NUMBER /p:CommitHash=$HOST_COMMIT src/WebJobs.Script.WebHost/WebJobs.Script.WebHost.csproj -c Release --output /azure-functions-host --runtime linux-x64 && \
    mv /azure-functions-host/workers /workers && mkdir /azure-functions-host/workers && \
    rm -rf /root/.local /root/.nuget /src

RUN EXTENSION_BUNDLE_VERSION=1.3.3 && \
    EXTENSION_BUNDLE_FILENAME=Microsoft.Azure.Functions.ExtensionBundle.1.3.3_linux-x64.zip && \
    apt-get update && \
    apt-get install -y gnupg wget unzip && \
    wget https://functionscdn.azureedge.net/public/ExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION/$EXTENSION_BUNDLE_FILENAME && \
    mkdir -p /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION && \
    unzip /$EXTENSION_BUNDLE_FILENAME -d /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION && \
    rm -f /$EXTENSION_BUNDLE_FILENAME && \
    EXTENSION_BUNDLE_VERSION_V2=2.0.1 && \
    EXTENSION_BUNDLE_FILENAME_V2=Microsoft.Azure.Functions.ExtensionBundle.2.0.1_linux-x64.zip && \
    wget https://functionscdn.azureedge.net/public/ExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2/$EXTENSION_BUNDLE_FILENAME_V2 && \
    mkdir -p /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2 && \
    unzip /$EXTENSION_BUNDLE_FILENAME_V2 -d /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2 && \
    rm -f /$EXTENSION_BUNDLE_FILENAME_V2 && \
    find /FuncExtensionBundles/ -type f -exec chmod 644 {} \;

FROM buildpack-deps:buster

# ensure local python is preferred over distribution python
ENV PATH /usr/local/bin:$PATH

# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8

# extra dependencies (over what buildpack-deps already includes)
RUN apt-get update && apt-get install -y --no-install-recommends \
        libbluetooth-dev \
        tk-dev \
        uuid-dev \
    && rm -rf /var/lib/apt/lists/*

ENV GPG_KEY E3FF2839C048B25C084DEBE9B26995E310250568
ENV PYTHON_VERSION 3.9.0

RUN set -ex \
    \
    && wget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
    && wget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
    && export GNUPGHOME="$(mktemp -d)" \
    && gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$GPG_KEY" \
    && gpg --batch --verify python.tar.xz.asc python.tar.xz \
    && { command -v gpgconf > /dev/null && gpgconf --kill all || :; } \
    && rm -rf "$GNUPGHOME" python.tar.xz.asc \
    && mkdir -p /usr/src/python \
    && tar -xJC /usr/src/python --strip-components=1 -f python.tar.xz \
    && rm python.tar.xz \
    && cd /usr/src/python \
    && gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \
    && ./configure \
        --build="$gnuArch" \
        --enable-loadable-sqlite-extensions \
        --enable-optimizations \
        --enable-option-checking=fatal \
        --enable-shared \
        --with-system-expat \
        --with-system-ffi \
        --without-ensurepip \
    && make -j "$(nproc)" \
    && make install \
    && rm -rf /usr/src/python \
    && find /usr/local -depth \
        \( \
            \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \
            -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name '*.a' \) \) \
        \) -exec rm -rf '{}' + \
    \
    && ldconfig \
    && python3 --version

# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
    && ln -s idle3 idle \
    && ln -s pydoc3 pydoc \
    && ln -s python3 python \
    && ln -s python3-config python-config

# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 20.2.3
# https://github.com/pypa/get-pip
ENV PYTHON_GET_PIP_URL https://github.com/pypa/get-pip/raw/fa7dc83944936bf09a0e4cb5d5ec852c0d256599/get-pip.py
ENV PYTHON_GET_PIP_SHA256 6e0bb0a2c2533361d7f297ed547237caf1b7507f197835974c0dd7eba998c53c

RUN set -ex; \
    \
    wget -O get-pip.py "$PYTHON_GET_PIP_URL"; \
    echo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum --check --strict -; \
    \
    python get-pip.py \
        --disable-pip-version-check \
        --no-cache-dir \
        "pip==$PYTHON_PIP_VERSION" \
    ; \
    pip --version; \
    \
    find /usr/local -depth \
        \( \
            \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \
            -o \
            \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \
        \) -exec rm -rf '{}' +; \
    rm -f get-pip.py

ARG HOST_VERSION

ENV LANG=C.UTF-8 \
    ACCEPT_EULA=Y \
    AzureWebJobsScriptRoot=/home/site/wwwroot \
    HOME=/home \
    FUNCTIONS_WORKER_RUNTIME=python \
    ASPNETCORE_URLS=http://+:80 \
    DOTNET_RUNNING_IN_CONTAINER=true \
    DOTNET_USE_POLLING_FILE_WATCHER=true \
    HOST_VERSION=${HOST_VERSION}

# Install system dependencies (MS SQL, .NET Core, and custom library prerequisites)
RUN apt-get update && \
    apt-get install -y wget vim && \
    echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections && \
    apt-get update && \
    apt-get install -y apt-transport-https curl gnupg && \
    curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
    curl https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
    # Needed for libssl1.0.0 and in turn MS SQL
    echo 'deb http://security.debian.org/debian-security jessie/updates main' >> /etc/apt/sources.list && \
    # install necessary locales for MS SQL
    apt-get update && apt-get install -y locales && \
    echo 'en_US.UTF-8 UTF-8' > /etc/locale.gen && \
    locale-gen && \
    # install MS SQL related packages
    apt-get update && \
    apt-get install -y unixodbc msodbcsql17 mssql-tools && \
    # .NET Core dependencies
    apt-get install -y --no-install-recommends ca-certificates \
        libc6 libgcc1 libgssapi-krb5-2 libicu63 libssl1.1 libstdc++6 zlib1g && \
    rm -rf /var/lib/apt/lists/* && \
    # Custom dependencies:
    # OpenCV dependencies:
    apt-get update && \
    apt-get install -y libglib2.0-0 libsm6 libxext6 libxrender-dev && \
    # binutils
    apt-get install -y binutils && \
    # OpenMP dependencies
    apt-get install -y libgomp1 && \
    # mysql dependencies
    apt-get install -y default-libmysqlclient-dev

COPY --from=runtime-image ["/azure-functions-host", "/azure-functions-host"]
COPY --from=runtime-image [ "/workers/python", "/azure-functions-host/workers/python" ]
COPY --from=runtime-image [ "/FuncExtensionBundles", "/FuncExtensionBundles" ]

RUN pip install grpcio grpcio-tools

RUN cp -r /azure-functions-host/workers/python/3.8 /azure-functions-host/workers/python/3.9 && \
    rm -r /azure-functions-host/workers/python/3.9/LINUX/X64/grpc && \
    rm -r /azure-functions-host/workers/python/3.9/LINUX/X64/grpcio-1.26.0.dist-info && \
    cp -r /usr/local/lib/python3.9/site-packages/grpc /azure-functions-host/workers/python/3.9/LINUX/X64/ && \
    cp -r /usr/local/lib/python3.9/site-packages/grpcio-1.32.0.dist-info /azure-functions-host/workers/python/3.9/LINUX/X64/ && \
    cp -r /usr/local/lib/python3.9/site-packages/grpc_tools /azure-functions-host/workers/python/3.9/LINUX/X64/ && \
    cp -r /usr/local/lib/python3.9/site-packages/grpcio_tools-1.32.0.dist-info /azure-functions-host/workers/python/3.9/LINUX/X64/

RUN cd /azure-functions-host/workers/python/ && \
    sed -i 's/3.8"]/3.8", "3.9"]/g' worker.config.json

RUN cd /azure-functions-host/workers/python/3.9 && \
    sed -i 's/asyncio.Task.current_task/asyncio.current_task/g' OSX/X64/azure_functions_worker/dispatcher.py && \
    sed -i 's/asyncio.Task.current_task/asyncio.current_task/g' LINUX/X64/azure_functions_worker/dispatcher.py

# Mount the local machine's azure-functions-python-worker and azure-functions-python-library here
RUN rm -rf /azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker

# Use the following command to run the docker image with a customizable worker and library
VOLUME ["/azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker"]

RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 379CE192D401AB61 && \
    echo "deb https://dl.bintray.com/loadimpact/deb stable main" | tee -a /etc/apt/sources.list && \
    apt-get update && \
    apt-get install -y git k6 procps && \
    # procps is required for displaying worker and profiling process info
    cd /home && \
    git clone https://github.com/vrdmr/AzFunctionsPythonPerformance.git && \
    mkdir -p /home/site/wwwroot/ && \
    cp -r AzFunctionsPythonPerformance/* /home/site/wwwroot/ && \
    pip install -r /home/site/wwwroot/requirements.txt

ENV FUNCTIONS_WORKER_RUNTIME_VERSION=3.9 \
    AzureWebJobsScriptRoot=/home/site/wwwroot \
    AzureFunctionsJobHost__Logging__Console__IsEnabled=true \
    FUNCTIONS_WORKER_PROCESS_COUNT=1 \
    AZURE_FUNCTIONS_ENVIRONMENT=Development

CMD [ "/azure-functions-host/Microsoft.Azure.WebJobs.Script.WebHost" ]
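A hypothetical build invocation for the Python 3.9 image above; the tag name is an assumption, while HOST_VERSION matches the Dockerfile's own default:

    docker build --build-arg HOST_VERSION=3.0.14492 -t python39-worker-perf .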
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 25 VUs during the first minute
        { target: 25, duration: "1m" },
        // Hold at 25 VUs for the next 3 minutes and 45 seconds
        { target: 25, duration: "3m45s" },
        // Linearly ramp down from 25 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerCPUIntensive`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
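A sketch of how one of these k6 scripts is driven; HOSTNAME, PORT, and PROTOCOL are the env vars the scripts read, while the script filename here is illustrative:

    # Point the load test at a deployed function app instead of localhost.
    PROTOCOL=https HOSTNAME=myapp.azurewebsites.net PORT=443 \
        ./k6 run async_http_trigger_cpu_intensive.js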
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 25 VUs during the first minute
        { target: 25, duration: "1m" },
        // Hold at 25 VUs for the next 3 minutes and 45 seconds
        { target: 25, duration: "3m45s" },
        // Linearly ramp down from 25 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerCPUIntensiveWithSleeps`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,48 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 100 VUs during the first 30 seconds
        { target: 100, duration: "30s" },
        // Hold at 100 VUs for the next 4 minutes and 15 seconds
        { target: 100, duration: "4m15s" },
        // Linearly ramp down from 100 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerHelloWorld`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        "content is present": (r) => r.body.indexOf("This HTTP triggered function executed successfully") !== -1,
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 100 VUs during the first 30 seconds
        { target: 100, duration: "30s" },
        // Hold at 100 VUs for the next 1 minute and 15 seconds
        { target: 100, duration: "1m15s" },
        // Linearly ramp down from 100 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~2 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        // "check_failure_rate": [
        //     // Global failure rate should be less than 1%
        //     "rate<0.01",
        //     // Abort the test early if it climbs over 5%
        //     { threshold: "rate<=0.05", abortOnFail: true },
        // ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerWithAsyncRequest`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 100 VUs during the first 30 seconds
        { target: 100, duration: "30s" },
        // Hold at 100 VUs for the next 4 minutes and 15 seconds
        { target: 100, duration: "4m15s" },
        // Linearly ramp down from 100 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerWithSyncRequests`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,65 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');
var INPUT_FILENAME = 'Input_256MB';
var CONTENT_SIZE = 1024 * 1024 * 256; // 256 MB

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 20 VUs during first minute
        { target: 20, duration: "1m" },
        // Hold at 20 VUs for the next 3 minutes and 45 seconds
        { target: 20, duration: "3m45s" },
        // Linearly ramp down from 20 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 40s
        "http_req_duration": ["p(95)<40000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Setup function
// This will create a blob which will later be used as an input binding
export function setup() {
    let no_random_input = true;
    let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncPutBlobAsBytesReturnHttpResponse?content_size=${CONTENT_SIZE}&no_random_input=${no_random_input}&outfile=${INPUT_FILENAME}`;
    let response = http.get(url);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        "content_size matches": (r) => r.json().content_size === CONTENT_SIZE,
    });
}

// Main function
export default function () {
    let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncGetBlobAsBytesReturnHttpResponse?infile=${INPUT_FILENAME}`;
    let response = http.get(url);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        "content_size matches": (r) => r.json().content_size === CONTENT_SIZE,
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,48 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 100 VUs during the first 30 seconds
        { target: 100, duration: "30s" },
        // Hold at 100 VUs for the next 1 minute and 15 seconds
        { target: 100, duration: "1m15s" },
        // Linearly ramp down from 100 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~2 minutes
    ],
    thresholds: {
        // We want the median (50th percentile) of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(50)<5000"]
        // Thresholds based on the custom metric we defined and use to track application failures
        // "check_failure_rate": [
        //     // Global failure rate should be less than 1%
        //     "rate<0.01",
        //     // Abort the test early if it climbs over 5%
        //     { threshold: "rate<=0.05", abortOnFail: true },
        // ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHelloWorld`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        // "content is present": (r) => r.body.indexOf("This HTTP triggered function executed successfully") !== -1,
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 25 VUs during the first minute
        { target: 25, duration: "1m" },
        // Hold at 25 VUs for the next 1 minute and 45 seconds
        { target: 25, duration: "1m45s" },
        // Linearly ramp down from 25 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~3 minutes
    ],
    thresholds: {
        // We want the 10th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(10)<5000"],
        // // Thresholds based on the custom metric we defined and use to track application failures
        // "check_failure_rate": [
        //     // Global failure rate should be less than 1%
        //     "rate<0.01",
        //     // Abort the test early if it climbs over 5%
        //     { threshold: "rate<=0.05", abortOnFail: true },
        // ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerCPUIntensive`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 25 VUs during the first minute
        { target: 25, duration: "1m" },
        // Hold at 25 VUs for the next 3 minutes and 45 seconds
        { target: 25, duration: "3m45s" },
        // Linearly ramp down from 25 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerCPUIntensiveWithSleeps`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,50 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

const fileContents = open('./largish_body.html');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 50 VUs during the first minute
        { target: 50, duration: "1m" },
        // Hold at 50 VUs for the next 3 minutes and 45 seconds
        { target: 50, duration: "3m45s" },
        // Linearly ramp down from 50 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(95)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let response = http.post(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerHtmlParser`, fileContents);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        "content is present": (r) => r.body.indexOf("StartTagCount") !== -1,
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,47 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 50 VUs during the first minute
        { target: 50, duration: "1m" },
        // Hold at 50 VUs for the next 3 minutes and 45 seconds
        { target: 50, duration: "3m45s" },
        // Linearly ramp down from 50 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the median (50th percentile) of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(50)<5000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        // "check_failure_rate": [
        //     // Global failure rate should be less than 1%
        //     "rate<0.01",
        //     // Abort the test early if it climbs over 5%
        //     { threshold: "rate<=0.05", abortOnFail: true },
        // ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerWithMixWorkloads`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,49 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 100 VUs during the first 30 seconds
        { target: 100, duration: "30s" },
        // Hold at 100 VUs for the next 1 minute and 15 seconds
        { target: 100, duration: "1m15s" },
        // Linearly ramp down from 100 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~2 minutes
    ],
    thresholds: {
        // We want the 10th percentile of all HTTP request durations to be less than 5s
        "http_req_duration": ["p(10)<5000"],
        // Requests with the staticAsset tag should finish even faster
        // "http_req_duration{staticAsset:yes}": ["p(99)<250"],
        // // Thresholds based on the custom metric we defined and use to track application failures
        // "check_failure_rate": [
        //     // Global failure rate should be less than 1%
        //     "rate<0.01",
        //     // Abort the test early if it climbs over 5%
        //     { threshold: "rate<=0.05", abortOnFail: true },
        // ],
    },
};

// Main function
export default function () {
    let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerWithSyncRequests`);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,53 +0,0 @@
import { check } from "k6";
import { Rate } from "k6/metrics";
import http from "k6/http";
import { randomIntBetween } from "https://jslib.k6.io/k6-utils/1.0.0/index.js";

var HOSTNAME = __ENV.HOSTNAME || 'localhost';
var PORT = __ENV.PORT || '80';
var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https');

// A custom metric to track failure rates
var failureRate = new Rate("check_failure_rate");

// Options
export let options = {
    stages: [
        // Linearly ramp up from 1 to 50 VUs during first minute
        { target: 50, duration: "1m" },
        // Hold at 50 VUs for the next 3 minutes and 45 seconds
        { target: 50, duration: "3m45s" },
        // Linearly ramp down from 50 to 0 VUs over the last 15 seconds
        { target: 0, duration: "15s" }
        // Total execution time will be ~5 minutes
    ],
    thresholds: {
        // We want the 95th percentile of all HTTP request durations to be less than 40s
        "http_req_duration": ["p(95)<40000"],
        // Thresholds based on the custom metric we defined and use to track application failures
        "check_failure_rate": [
            // Global failure rate should be less than 1%
            "rate<0.01",
            // Abort the test early if it climbs over 5%
            { threshold: "rate<=0.05", abortOnFail: true },
        ],
    },
};

// Main function
export default function () {
    let content_size = 1024 * 1024 * 256; // 256 MB
    let no_random_input = true;
    let outfile = randomIntBetween(1, 500000);
    let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncPutBlobAsBytesReturnHttpResponse?content_size=${content_size}&no_random_input=${no_random_input}&outfile=${outfile}`;
    let response = http.get(url);

    // check() returns false if any of the specified conditions fail
    let checkRes = check(response, {
        "status is 200": (r) => r.status === 200,
        "content_size matches": (r) => r.json().content_size === content_size,
    });

    // We reverse the check() result since we want to count the failures
    failureRate.add(!checkRes);
}
@@ -1,18 +0,0 @@
#!/bin/bash

HOST=$1
PORT=$2
PERF_TESTS_LINK=$3
TEST_TO_RUN=$4
PROTOCOL=http

runk6tests () {
    PROTOCOL=$PROTOCOL HOSTNAME=$1 PORT=$2 ./k6 run --summary-export=test-summary.json -q $PERF_TESTS_LINK/$TEST_TO_RUN.js
}

printresults () {
    cat test-summary.json
}

runk6tests "$HOST" "$PORT"
#printresults
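A hypothetical invocation of the runner above; the script name, test-script URL, and test name are placeholders, not values from the commit:

    ./run-perf-tests.sh myapp.azurewebsites.net 80 \
        https://example.com/k6scripts SyncHelloWorld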
@@ -1,71 +0,0 @@
# This workflow will run all tests in tests/consumption_tests in Docker using a consumption image
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: CI Consumption E2E tests

on:
  workflow_dispatch:
    inputs:
      custom_image:
        description: "Use a custom image to run consumption tests"
        required: false
  push:
    branches: [ dev, main, release/* ]
  pull_request:
    branches: [ dev, main, release/* ]

jobs:
  build:
    name: "Python Consumption CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ]
    permissions: read-all
    steps:
      - name: Checkout code.
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre
          python -m pip install -U -e .[dev]
          if [[ "${{ matrix.python-version }}" != "3.7" ]]; then
            python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2]
          fi
          python setup.py build
      - name: Running 3.7 Tests
        if: matrix.python-version == 3.7
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
          CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
        run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
      - name: Running 3.8 Tests
        if: matrix.python-version == 3.8
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
          CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
        run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
      - name: Running 3.9 Tests
        if: matrix.python-version == 3.9
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
          CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
        run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
      - name: Running 3.10 Tests
        if: matrix.python-version == 3.10
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
          CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
        run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
      - name: Running 3.11 Tests
        if: matrix.python-version == 3.11
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
          CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
        run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
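To reproduce one matrix leg of this workflow locally, something along these lines should work; the connection string is an assumption, while the pytest flags come straight from the workflow:

    # Consumption E2E tests expect a real storage account to be configured.
    export AzureWebJobsStorage="<storage connection string>"
    python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests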
@@ -1,92 +0,0 @@
# This workflow will run all tests in tests/endtoend in a docker container using the latest consumption image

name: CI Docker Consumption tests

on:
  workflow_dispatch:
  schedule:
    # Run every day at 5 AM CST
    - cron: "0 10 * * *"

jobs:
  build:
    name: "Python Docker CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ]
    permissions: read-all
    env:
      CONSUMPTION_DOCKER_TEST: "true"

    steps:
      - name: Checkout code.
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python setup.py build
      - name: Running 3.7 Tests
        if: matrix.python-version == 3.7
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.8 Tests
        if: matrix.python-version == 3.8
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.9 Tests
        if: matrix.python-version == 3.9
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.10 Tests
        if: matrix.python-version == 3.10
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.11 Tests
        if: matrix.python-version == 3.11
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
@@ -1,112 +0,0 @@
# This workflow will run all tests in endtoend/tests in a docker container using custom dedicated or consumption image

name: CI Docker Custom tests

on:
  workflow_dispatch:
    inputs:
      image_name:
        description: 'Image'
        required: true
      python_version:
        description: 'Python Version'
        required: true
        type: choice
        default: '3.11'
        options: [ '3.7', '3.8', '3.9', '3.10', '3.11' ]
      DEDICATED_DOCKER_TEST:
        description: 'Is this Dedicated Image?'
        required: true
        type: choice
        default: 'true'
        options: [ 'true', 'false' ]
      CONSUMPTION_DOCKER_TEST:
        description: 'Is this Consumption Image?'
        required: true
        type: choice
        default: 'false'
        options: [ 'true', 'false' ]

jobs:
  build:
    name: "Python Docker CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    permissions: read-all
    env:
      DEDICATED_DOCKER_TEST: ${{ github.event.inputs.DEDICATED_DOCKER_TEST }}
      CONSUMPTION_DOCKER_TEST: ${{ github.event.inputs.CONSUMPTION_DOCKER_TEST }}
      python_version: ${{ github.event.inputs.python_version }}
      IMAGE_NAME: ${{ github.event.inputs.image_name }}
    steps:
      - name: Checkout code.
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.python_version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.python_version }}
      - name: Install dependencies
        run: |
          python setup.py build
      - name: Running 3.7 Tests
        if: env.python_version == 3.7
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }}
        run: |
          python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.8 Tests
        if: env.python_version == 3.8
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }}
        run: |
          python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.9 Tests
        if: env.python_version == 3.9
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }}
        run: |
          python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.10 Tests
        if: env.python_version == 3.10
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }}
        run: |
          python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.11 Tests
        if: env.python_version == 3.11
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }}
        run: |
          python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend
@@ -1,93 +0,0 @@
# This workflow will run all tests in endtoend/tests in a docker container using the latest dedicated image

name: CI Docker Dedicated tests

on:
  workflow_dispatch:
  schedule:
    # Run every day at 4 AM CST
    - cron: "0 9 * * *"

jobs:
  build:
    name: "Python Docker CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ]
    permissions: read-all
    env:
      DEDICATED_DOCKER_TEST: "true"

    steps:
      - name: Checkout code.
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python setup.py build
      - name: Running 3.7 Tests
        if: matrix.python-version == 3.7
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.8 Tests
        if: matrix.python-version == 3.8
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.9 Tests
        if: matrix.python-version == 3.9
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.10 Tests
        if: matrix.python-version == 3.10
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
      - name: Running 3.11 Tests
        if: matrix.python-version == 3.11
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }}
        run: |
          python -m pytest --reruns 4 -vv --instafail tests/endtoend
@@ -1,161 +0,0 @@
# This workflow will install Python dependencies and run end-to-end tests with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: CI E2E tests

on:
  workflow_dispatch:
    inputs:
      archive_webhost_logging:
        description: "For debugging purposes, archive test webhost logs"
        required: false
        default: "false"
  push:
    branches: [dev, main, release/*]
  pull_request:
    branches: [dev, main, release/*]
  schedule:
    # Monday to Friday 3 AM CST build
    # * is a special character in YAML so you have to quote this string
    - cron: "0 8 * * 1,2,3,4,5"

jobs:
  build:
    name: "Python E2E CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.7, 3.8, 3.9, "3.10", "3.11"]
        # Currently runs FWPC E2E tests, classic E2E tests, and DefBindings E2E tests.
        # To run tests from another script, add the script name to this matrix
        test-type: [fwpc-e2e-tests, e2e-tests, deferred-bindings-e2e-tests]
    permissions: read-all
    steps:
      - name: Checkout code.
        uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Set up Dotnet 8.0.x
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "8.0.x"
      - name: Install dependencies and the worker
        run: |
          retry() {
            local -r -i max_attempts="$1"; shift
            local -r cmd="$@"
            local -i attempt_num=1
            until $cmd
            do
              if (( attempt_num == max_attempts ))
              then
                echo "Attempt $attempt_num failed and there are no more attempts left!"
                return 1
              else
                echo "Attempt $attempt_num failed! Trying again in $attempt_num seconds..."
                sleep 1
              fi
            done
          }

          python -m pip install --upgrade pip
          python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre
          python -m pip install -U -e .[dev]

          if [[ "${{ matrix.python-version }}" != "3.7" ]]; then
            python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2]
          fi
          if [[ "${{ matrix.python-version }}" != "3.7" && "${{ matrix.python-version }}" != "3.8" ]]; then
            python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-deferred-bindings]
          fi

          # Retry a couple times to avoid certificate issue
          retry 5 python setup.py build
          retry 5 python setup.py webhost --branch-name=dev
          retry 5 python setup.py extension
          mkdir logs
      - name: Grant execute permission
        run: |
          chmod +x .github/Scripts/${{ matrix.test-type }}.sh
      - name: Running 3.7 ${{ matrix.test-type }}
        if: matrix.python-version == 3.7
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }}
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          .github/Scripts/${{ matrix.test-type }}.sh
      - name: Running 3.8 ${{ matrix.test-type }}
        if: matrix.python-version == 3.8
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }}
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          .github/Scripts/${{ matrix.test-type }}.sh
      - name: Running 3.9 ${{ matrix.test-type }}
        if: matrix.python-version == 3.9
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }}
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          .github/Scripts/${{ matrix.test-type }}.sh
      - name: Running 3.10 ${{ matrix.test-type }}
        if: matrix.python-version == 3.10
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }}
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          .github/Scripts/${{ matrix.test-type }}.sh
      - name: Running 3.11 ${{ matrix.test-type }}
        if: matrix.python-version == 3.11
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
          AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }}
          AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }}
          AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }}
          AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }}
          AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }}
          AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }}
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          .github/Scripts/${{ matrix.test-type }}.sh
      - name: Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml # optional
          flags: unittests # optional
          name: codecov # optional
          fail_ci_if_error: false # optional (default = false)
      - name: Publish Logs to Artifact
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: Test WebHost Logs ${{ github.run_id }} ${{ matrix.python-version }}
          path: logs/*.log
          if-no-files-found: ignore
@@ -1,91 +0,0 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: CI Unit tests

on:
  workflow_dispatch:
    inputs:
      archive_webhost_logging:
        description: "For debugging purposes, archive test webhost logs"
        required: false
        default: "false"
  schedule:
    # Monday to Thursday 3 AM CST build
    # * is a special character in YAML so you have to quote this string
    - cron: "0 8 * * 1,2,3,4"
  push:
  pull_request:
    branches: [ dev, main, release/* ]

jobs:
  build:
    name: "Python UT CI Run"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ]
    permissions: read-all
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Set up Dotnet 8.0.x
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: "8.0.x"
      - name: Install dependencies and the worker
        run: |
          retry() {
            local -r -i max_attempts="$1"; shift
            local -r cmd="$@"
            local -i attempt_num=1
            until $cmd
            do
              if (( attempt_num == max_attempts ))
              then
                echo "Attempt $attempt_num failed and there are no more attempts left!"
                return 1
              else
                echo "Attempt $attempt_num failed! Trying again in $attempt_num seconds..."
                sleep 1
              fi
            done
          }

          python -m pip install --upgrade pip
          python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre

          python -m pip install -U -e .[dev]
          if [[ "${{ matrix.python-version }}" != "3.7" ]]; then
            python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2]
          fi

          # Retry a couple times to avoid certificate issue
          retry 5 python setup.py build
          retry 5 python setup.py webhost --branch-name=dev
          retry 5 python setup.py extension
          mkdir logs
      - name: Test with pytest
        env:
          AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} # needed for installing azure-functions-durable while running setup.py
          ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }}
        run: |
          python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests
      - name: Codecov
        uses: codecov/codecov-action@v3
        with:
          file: ./coverage.xml # optional
          flags: unittests # optional
          name: codecov # optional
          fail_ci_if_error: false # optional (default = false)
      - name: Publish Logs to Artifact
        if: failure()
        uses: actions/upload-artifact@v4
        with:
          name: Test WebHost Logs ${{ github.run_id }} ${{ matrix.python-version }}
          path: logs/*.log
          if-no-files-found: ignore
@@ -1,70 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ dev, v3.x-dev, main*, release* ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ dev ]
  schedule:
    - cron: '25 11 * * 5'

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
        # Learn more about CodeQL language support at https://git.io/codeql-language-support

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
@@ -1,137 +0,0 @@
name: Throughput testing workflow

on:
  workflow_dispatch:
    inputs:
      profiling_sampling_rate:
        description: 'Profiling sampling rate (tps)'
        required: false
        default: '500'
      test_to_run:
        description: 'List of perf tests to run'
        required: false
        default: SyncHelloWorld
      report_format:
        description: 'Format of profiling report'
        type: choice
        required: true
        options:
          - speedscope
          - flamegraph
  issue_comment:
    types: [created]

env:
  TESTS_DIR_PATH: ".ci/perf_tests/k6scripts/"
  PYTHON_VERSION: "3.10"
  PYTHON_FUNCTION_PROFILING_STORAGE_ACCT: "azpyfuncpipelinestorage"
  PORT: 8000

jobs:
  build:
    if: ${{ github.event_name == 'workflow_dispatch' || github.event.issue.pull_request != null && contains(github.event.comment.body, '/profile') }}
    runs-on: ubuntu-latest
    permissions: read-all
    strategy:
      fail-fast: false
      matrix:
        test_to_run: ['${{ github.event.inputs.test_to_run }}']
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ env.PYTHON_VERSION }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ env.PYTHON_VERSION }}
      - name: Setup k6 for throughput testing
        run: |
          cd $GITHUB_WORKSPACE
          curl https://github.com/loadimpact/k6/releases/download/v0.28.0/k6-v0.28.0-linux64.tar.gz -L | tar xvz --strip-components 1
          chmod 755 ./k6
          ./k6 version
      - name: Install dependencies and the worker
        run: |
          python -m pip install -q --upgrade pip
          python -m pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U -e .[dev]
          python -m pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre
          python setup.py build
          python setup.py webhost --branch-name=dev
          python setup.py extension
      - name: Build and Run the Docker image
        run: |
          echo "Building Docker image with Python version ${{ env.PYTHON_VERSION }}"
          docker build --build-arg PYTHON_VERSION=${{ env.PYTHON_VERSION }} --file .ci/perf_tests/dockerfiles/perf_tests.Dockerfile --tag perfimage:latest .

          echo "Running Docker container..."
          container_id=$(docker run -d --privileged --env FUNCTIONS_WORKER_RUNTIME_VERSION=${{ env.PYTHON_VERSION }} -p ${PORT}:80 -v $GITHUB_WORKSPACE/azure_functions_worker:/azure-functions-host/workers/python/${{ env.PYTHON_VERSION }}/LINUX/X64/azure_functions_worker perfimage:latest)
          sleep 10 # host needs some time to start.
          echo "Container ID is $container_id"

          echo "Fetching Docker container logs..."
          docker logs $container_id
          worker_pid=$(docker exec $container_id sh -c "ps aux | grep '[p]ython'" | awk '{print $2}')
          echo "Python worker process id is $worker_pid"

          echo "container_id=$container_id" >> $GITHUB_ENV
          echo "worker_pid=$worker_pid" >> $GITHUB_ENV

      - name: Validate if the functions are now running
        run: |
          curl --get http://localhost:${PORT}/api/${{ matrix.test_to_run }}

      - name: Start py-spy in the background
        run: |
          docker exec $container_id sh -c "pip install py-spy"
          docker exec $container_id sh -c "mkdir /home/profiling_reports"
          profiling_sampling_rate=${{ github.event.inputs.profiling_sampling_rate }}
          report_format=${{ github.event.inputs.report_format }}
          if [ "$report_format" == "flamegraph" ]; then
            report_name="${{ github.run_id }}.svg"
          elif [ "$report_format" == "speedscope" ]; then
            report_name="${{ github.run_id }}.speedscope.json"
          else
            echo "Unsupported report format: $report_format"
            exit 1
          fi
          docker exec -d $container_id sh -c "RUST_BACKTRACE=1 py-spy record -p $worker_pid -o /home/profiling_reports/$report_name -f $report_format --idle --nonblocking --rate $profiling_sampling_rate > /home/site/wwwroot/py-spy.log 2>&1 &"
          sleep 2 # Give it a moment to start
          py_spy_id=$(docker exec $container_id sh -c "ps aux | grep '[p]y-spy record'" | awk '{print $2}')

          echo "py_spy_id=$py_spy_id" >> $GITHUB_ENV
          echo "report_name=$report_name" >> $GITHUB_ENV

      - name: Run Throughput tests
        run: |
          chmod 755 .ci/perf_tests/run-perftests.sh
          .ci/perf_tests/run-perftests.sh localhost $PORT ${{ env.TESTS_DIR_PATH }} ${{ matrix.test_to_run }}

      - name: Stop profiling and generate report
        run: |
          echo "Tests completed, terminating py-spy..."
          docker exec $container_id cat /home/site/wwwroot/py-spy.log
          docker exec $container_id sh -c "kill -2 $py_spy_id"
          sleep 2
          mkdir profiling_reports
          chmod 777 profiling_reports
          docker cp $container_id:/home/profiling_reports/$report_name profiling_reports
      - name: Upload SVG to Azure Blob Storage
        uses: bacongobbler/azure-blob-storage-upload@v3.0.0
        with:
          source_dir: 'profiling_reports' # Directory containing the $report_name file
          container_name: 'profiling'
          connection_string: ${{ secrets.AZURE_STORAGE_CONNECTION_STRING }}
          sync: 'false'

      - name: Output Blob URL
        run: |
          blob_url="https://${{ env.PYTHON_FUNCTION_PROFILING_STORAGE_ACCT }}.blob.core.windows.net/profiling/${{ env.report_name }}"
          echo "You can view the Blob at: $blob_url"

      - name: Upload profiling result to artifact
        uses: actions/upload-artifact@v2
        with:
          name: py-spy-output
          path: 'profiling_reports/${{ env.report_name }}'

      - name: Create Artifact Link
        run: |
          echo "You can download the SVG artifact from the Actions run page."
          echo "Link to the Actions run page: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"
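The profiling step in the workflow above reduces to: attach `py-spy record` to the worker process, run the k6 load, then send SIGINT so py-spy flushes its report. For local reproduction outside Docker, roughly the same sequence can be driven from Python. This is a minimal sketch, assuming `py-spy` is installed and on PATH and that the OS permits attaching to the target pid (on Linux this may require elevated ptrace permissions); the pid, output path, and durations are placeholders:

import os
import signal
import subprocess
import time

def profile(pid: int, report_path: str, rate: int = 500, seconds: int = 30) -> None:
    # Mirrors the workflow: start `py-spy record` against the worker process,
    # let the load phase run, then stop with SIGINT so the report is written.
    recorder = subprocess.Popen([
        "py-spy", "record",
        "-p", str(pid),
        "-o", report_path,
        "-f", "speedscope",
        "--idle", "--nonblocking",
        "--rate", str(rate),
    ])
    time.sleep(seconds)                  # stand-in for the k6 load phase
    recorder.send_signal(signal.SIGINT)  # same as `kill -2` in the workflow
    recorder.wait(timeout=30)

if __name__ == "__main__":
    profile(os.getpid(), "profile.speedscope.json", seconds=5)

Recording with `--idle --nonblocking` matches the workflow's flags: idle samples keep the timeline complete, and the non-blocking attach avoids pausing the worker under test at the cost of some sampling accuracy.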
@@ -201,6 +201,19 @@ def datum_as_proto(datum: Datum) -> protos.TypedData:
         ))
     elif datum.type is None:
         return None
+    elif datum.type == 'dict':
+        # TypedData doesn't support dict, so we return it as json
+        return protos.TypedData(json=json.dumps(datum.value))
+    elif datum.type == 'list':
+        # TypedData doesn't support list, so we return it as json
+        return protos.TypedData(json=json.dumps(datum.value))
+    elif datum.type == 'int':
+        return protos.TypedData(int=datum.value)
+    elif datum.type == 'double':
+        return protos.TypedData(double=datum.value)
+    elif datum.type == 'bool':
+        # TypedData doesn't support bool, so we return it as an int
+        return protos.TypedData(int=int(datum.value))
     else:
         raise NotImplementedError(
             'unexpected Datum type: {!r}'.format(datum.type)
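The new branches above work around gaps in the `TypedData` protobuf contract: it has no dict, list, or bool field, so dicts and lists ride on the existing `json` field and bools on `int`. A minimal stand-alone sketch of that mapping follows; the `TypedData` dataclass here is a hypothetical stand-in for the generated `protos.TypedData` message, modeling only the fields the new branches touch:

import json
from dataclasses import dataclass
from typing import Optional

@dataclass
class TypedData:
    # Hypothetical stand-in for the protobuf-generated protos.TypedData.
    json: Optional[str] = None
    int: Optional[int] = None
    double: Optional[float] = None

def to_typed_data(type_: Optional[str], value) -> Optional[TypedData]:
    # Mirrors the dispatch added in datum_as_proto above.
    if type_ is None:
        return None
    if type_ in ('dict', 'list'):
        # TypedData has no dict/list field, so both travel as JSON text.
        return TypedData(json=json.dumps(value))
    if type_ == 'int':
        return TypedData(int=value)
    if type_ == 'double':
        return TypedData(double=value)
    if type_ == 'bool':
        # No bool field either; booleans travel as 0/1 integers.
        return TypedData(int=int(value))
    raise NotImplementedError('unexpected Datum type: {!r}'.format(type_))

assert to_typed_data('dict', {'a': 1}).json == '{"a": 1}'
assert to_typed_data('bool', True).int == 1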
@@ -30,6 +30,16 @@ class GenericBinding:
             return datumdef.Datum(type='bytes', value=bytes(obj))
         elif obj is None:
             return datumdef.Datum(type=None, value=obj)
+        elif isinstance(obj, dict):
+            return datumdef.Datum(type='dict', value=obj)
+        elif isinstance(obj, list):
+            return datumdef.Datum(type='list', value=obj)
+        elif isinstance(obj, int):
+            return datumdef.Datum(type='int', value=obj)
+        elif isinstance(obj, float):
+            return datumdef.Datum(type='double', value=obj)
+        elif isinstance(obj, bool):
+            return datumdef.Datum(type='bool', value=obj)
         else:
             raise NotImplementedError
 
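One subtlety worth noting in the `isinstance` chain above: `bool` is a subclass of `int` in Python, so `isinstance(True, int)` is `True` and, with the ordering shown, a boolean is classified as `'int'` before the `'bool'` branch can ever match. A quick illustration of the dispatch with the bool check hoisted ahead of int (a sketch for demonstration, not the worker's actual code):

def classify(obj):
    # Sketch of the GenericBinding dispatch, with the bool check moved
    # before int: checking int first would swallow True/False values.
    if obj is None:
        return None
    if isinstance(obj, bool):
        return 'bool'
    if isinstance(obj, dict):
        return 'dict'
    if isinstance(obj, list):
        return 'list'
    if isinstance(obj, int):
        return 'int'
    if isinstance(obj, float):
        return 'double'
    raise NotImplementedError

assert classify(True) == 'bool'
assert classify(3) == 'int'
assert classify(2.5) == 'double'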
@@ -1,4 +1,4 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
 
-VERSION = '4.26.0'
+VERSION = '4.29.0'
@@ -0,0 +1,18 @@
trigger:
  branches:
    include:
      - dev
      - release/*

resources:
  repositories:
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/cfs.yml@eng

extends:
  template: /ci/code-mirror.yml@eng
@@ -0,0 +1,27 @@
resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: RunCoreToolsTests
        jobs:
          - template: /eng/templates/official/jobs/ci-core-tools-tests.yml@self
@@ -0,0 +1,27 @@
resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: RunCustomDockerImageTests
        jobs:
          - template: /eng/templates/official/jobs/ci-custom-image-tests.yml@self
@@ -0,0 +1,38 @@
# CI only, does not trigger on PRs.
pr: none

schedules:
  - cron: "0 10 * * *"
    displayName: Run every day at 5 AM CST
    branches:
      include:
        - dev
    always: true

resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: RunDockerConsumptionTests
        jobs:
          - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self
@@ -0,0 +1,38 @@
# CI only, does not trigger on PRs.
pr: none

schedules:
  - cron: "0 11 * * *"
    displayName: Run every day at 6 AM CST
    branches:
      include:
        - dev
    always: true

resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: RunDockerDedicatedTests
        jobs:
          - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self
@@ -0,0 +1,49 @@
trigger: none # ensure this is not run as a CI build

pr:
  branches:
    include:
      - dev
      - release/*

schedules:
  - cron: "0 8 * * 1,2,3,4,5"
    displayName: Monday to Friday 3 AM CST build
    branches:
      include:
        - dev
    always: true

resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: RunE2ETests
        jobs:
          - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self
      - stage: RunLCTests
        jobs:
          - template: /eng/templates/official/jobs/ci-lc-tests.yml@self
      - stage: RunUnitTests
        jobs:
          - template: /eng/templates/jobs/ci-unit-tests.yml@self
@@ -0,0 +1,66 @@
trigger:
  batch: true
  branches:
    include:
      - dev
      - release/*

# CI only, does not trigger on PRs.
pr: none

schedules:
  - cron: '0 0 * * MON'
    displayName: At 12:00 AM, only on Monday
    branches:
      include:
        - dev
    always: true

resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release
    - repository: eng
      type: git
      name: engineering
      ref: refs/tags/release

variables:
  - template: /ci/variables/build.yml@eng
  - template: /ci/variables/cfs.yml@eng

extends:
  template: v1/1ES.Official.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc
      image: 1es-windows-2022
      os: windows

    stages:
      - stage: Build
        jobs:
          - template: /eng/templates/official/jobs/build-artifacts.yml@self

      - stage: RunE2ETests
        dependsOn: Build
        jobs:
          - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self
      - stage: RunUnitTests
        dependsOn: Build
        jobs:
          - template: /eng/templates/jobs/ci-unit-tests.yml@self
      - stage: RunDockerConsumptionTests
        dependsOn: Build
        jobs:
          - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self
      - stage: RunDockerDedicatedTests
        dependsOn: Build
        jobs:
          - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self
      - stage: RunLinuxConsumptionTests
        dependsOn: Build
        jobs:
          - template: /eng/templates/official/jobs/ci-lc-tests.yml@self
@@ -0,0 +1,47 @@
trigger:
  batch: true
  branches:
    include:
      - dev

pr:
  branches:
    include:
      - dev

schedules:
  - cron: '0 0 * * MON'
    displayName: At 12:00 AM, only on Monday
    branches:
      include:
        - dev
    always: true

resources:
  repositories:
    - repository: 1es
      type: git
      name: 1ESPipelineTemplates/1ESPipelineTemplates
      ref: refs/tags/release

extends:
  template: v1/1ES.Unofficial.PipelineTemplate.yml@1es
  parameters:
    pool:
      name: 1es-pool-azfunc-public
      image: 1es-windows-2022
      os: windows
    sdl:
      codeql:
        compiled:
          enabled: true # still only runs for default branch
        runSourceLanguagesInSourceAnalysis: true

    stages:
      - stage: Build
        jobs:
          - template: /eng/templates/jobs/build.yml@self
      - stage: RunUnitTests
        dependsOn: Build
        jobs:
          - template: /eng/templates/jobs/ci-unit-tests.yml@self
@@ -0,0 +1,22 @@
jobs:
  - job: "Build"
    displayName: 'Build python worker'

    pool:
      name: 1es-pool-azfunc-public
      image: 1es-ubuntu-22.04
      os: linux

    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: "3.11"
      - bash: |
          python --version
        displayName: 'Check python version'
      - bash: |
          python -m venv .env
          source .env/bin/activate
          python -m pip install --upgrade pip==23.0
          python -m pip install .
        displayName: 'Build python worker'
@@ -0,0 +1,42 @@
jobs:
  - job: "TestPython"
    displayName: "Run Python Unit Tests"

    strategy:
      matrix:
        Python37:
          PYTHON_VERSION: '3.7'
        Python38:
          PYTHON_VERSION: '3.8'
        Python39:
          PYTHON_VERSION: '3.9'
        Python310:
          PYTHON_VERSION: '3.10'
        Python311:
          PYTHON_VERSION: '3.11'

    steps:
      - task: UsePythonVersion@0
        inputs:
          versionSpec: $(PYTHON_VERSION)
      - task: UseDotNet@2
        displayName: 'Install .NET 8'
        inputs:
          version: 8.0.x
      - bash: |
          python -m pip install --upgrade pip
          python -m pip install -U azure-functions --pre

          python -m pip install -U -e .[dev]
          if [[ $(PYTHON_VERSION) != "3.7" ]]; then
            python -m pip install --pre -U -e .[test-http-v2]
          fi

          python setup.py build
          python setup.py webhost --branch-name=dev
          python setup.py extension
        displayName: "Install dependencies"
      - bash: |
          python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests
        displayName: "Running $(PYTHON_VERSION) Unit Tests"
@ -1,39 +1,34 @@
|
||||||
name: $(Date:yyyyMMdd).$(Rev:r)
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
- release/3.*
|
|
||||||
- release/4.*
|
|
||||||
- dev
|
|
||||||
|
|
||||||
variables:
|
|
||||||
patchBuildNumberForDev: $(Build.BuildNumber)
|
|
||||||
PROD_V4_WORKER_PY: 'python/prodV4/worker.py'
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
- job: Build_WINDOWS_X64
|
- job: Build_WINDOWS_X64
|
||||||
pool:
|
pool:
|
||||||
name: '1ES-Hosted-AzFunc' #MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu
|
name: 1es-pool-azfunc-public
|
||||||
demands:
|
image: 1es-windows-2022
|
||||||
- ImageOverride -equals MMS2019TLS
|
os: windows
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
Python37V4:
|
Python37V4:
|
||||||
pythonVersion: '3.7'
|
pythonVersion: '3.7'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python38V4:
|
Python38V4:
|
||||||
pythonVersion: '3.8'
|
pythonVersion: '3.8'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python39V4:
|
Python39V4:
|
||||||
pythonVersion: '3.9'
|
pythonVersion: '3.9'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python310V4:
|
Python310V4:
|
||||||
pythonVersion: '3.10'
|
pythonVersion: '3.10'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python311V4:
|
Python311V4:
|
||||||
pythonVersion: '3.11'
|
pythonVersion: '3.11'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
|
templateContext:
|
||||||
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
|
outputs:
|
||||||
|
- output: pipelineArtifact
|
||||||
|
targetPath: $(Build.SourcesDirectory)
|
||||||
|
artifactName: "$(pythonVersion)_WINDOWS_X64"
|
||||||
steps:
|
steps:
|
||||||
- template: pack/templates/win_env_gen.yml
|
- template: ../../../../pack/templates/win_env_gen.yml
|
||||||
parameters:
|
parameters:
|
||||||
pythonVersion: '$(pythonVersion)'
|
pythonVersion: '$(pythonVersion)'
|
||||||
workerPath: '$(workerPath)'
|
workerPath: '$(workerPath)'
|
||||||
|
@ -41,28 +36,34 @@ jobs:
|
||||||
artifactName: '$(pythonVersion)_WINDOWS_X64'
|
artifactName: '$(pythonVersion)_WINDOWS_X64'
|
||||||
- job: Build_WINDOWS_X86
|
- job: Build_WINDOWS_X86
|
||||||
pool:
|
pool:
|
||||||
name: '1ES-Hosted-AzFunc' #MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu
|
name: 1es-pool-azfunc-public
|
||||||
demands:
|
image: 1es-windows-2022
|
||||||
- ImageOverride -equals MMS2019TLS
|
os: windows
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
Python37V4:
|
Python37V4:
|
||||||
pythonVersion: '3.7'
|
pythonVersion: '3.7'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python38V4:
|
Python38V4:
|
||||||
pythonVersion: '3.8'
|
pythonVersion: '3.8'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python39V4:
|
Python39V4:
|
||||||
pythonVersion: '3.9'
|
pythonVersion: '3.9'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python310V4:
|
Python310V4:
|
||||||
pythonVersion: '3.10'
|
pythonVersion: '3.10'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python311V4:
|
Python311V4:
|
||||||
pythonVersion: '3.11'
|
pythonVersion: '3.11'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
|
templateContext:
|
||||||
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
|
outputs:
|
||||||
|
- output: pipelineArtifact
|
||||||
|
targetPath: $(Build.SourcesDirectory)
|
||||||
|
artifactName: "$(pythonVersion)_WINDOWS_X86"
|
||||||
steps:
|
steps:
|
||||||
- template: pack/templates/win_env_gen.yml
|
- template: ../../../../pack/templates/win_env_gen.yml
|
||||||
parameters:
|
parameters:
|
||||||
pythonVersion: '$(pythonVersion)'
|
pythonVersion: '$(pythonVersion)'
|
||||||
workerPath: '$(workerPath)'
|
workerPath: '$(workerPath)'
|
||||||
|
@ -70,74 +71,96 @@ jobs:
|
||||||
artifactName: '$(pythonVersion)_WINDOWS_x86'
|
artifactName: '$(pythonVersion)_WINDOWS_x86'
|
||||||
- job: Build_LINUX_X64
|
- job: Build_LINUX_X64
|
||||||
pool:
|
pool:
|
||||||
name: '1ES-Hosted-AzFunc' # MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu
|
name: 1es-pool-azfunc
|
||||||
demands:
|
image: 1es-ubuntu-22.04
|
||||||
- ImageOverride -equals MMSUbuntu20.04TLS
|
os: linux
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
Python37V4:
|
Python37V4:
|
||||||
pythonVersion: '3.7'
|
pythonVersion: '3.7'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python38V4:
|
Python38V4:
|
||||||
pythonVersion: '3.8'
|
pythonVersion: '3.8'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python39V4:
|
Python39V4:
|
||||||
pythonVersion: '3.9'
|
pythonVersion: '3.9'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python310V4:
|
Python310V4:
|
||||||
pythonVersion: '3.10'
|
pythonVersion: '3.10'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python311V4:
|
Python311V4:
|
||||||
pythonVersion: '3.11'
|
pythonVersion: '3.11'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
|
templateContext:
|
||||||
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
|
outputs:
|
||||||
|
- output: pipelineArtifact
|
||||||
|
targetPath: $(Build.SourcesDirectory)
|
||||||
|
artifactName: "$(pythonVersion)_LINUX_X64"
|
||||||
steps:
|
steps:
|
||||||
- template: pack/templates/nix_env_gen.yml
|
- template: ../../../../pack/templates/nix_env_gen.yml
|
||||||
parameters:
|
parameters:
|
||||||
pythonVersion: '$(pythonVersion)'
|
pythonVersion: '$(pythonVersion)'
|
||||||
workerPath: '$(workerPath)'
|
workerPath: '$(workerPath)'
|
||||||
artifactName: '$(pythonVersion)_LINUX_X64'
|
artifactName: '$(pythonVersion)_LINUX_X64'
|
||||||
- job: Build_OSX_X64
|
- job: Build_OSX_X64
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'macOS-latest'
|
name: Azure Pipelines
|
||||||
|
image: macOS-latest
|
||||||
|
os: macOS
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
Python37V4:
|
Python37V4:
|
||||||
pythonVersion: '3.7'
|
pythonVersion: '3.7'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python38V4:
|
Python38V4:
|
||||||
pythonVersion: '3.8'
|
pythonVersion: '3.8'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python39V4:
|
Python39V4:
|
||||||
pythonVersion: '3.9'
|
pythonVersion: '3.9'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python310V4:
|
Python310V4:
|
||||||
pythonVersion: '3.10'
|
pythonVersion: '3.10'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python311V4:
|
Python311V4:
|
||||||
pythonVersion: '3.11'
|
pythonVersion: '3.11'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
|
templateContext:
|
||||||
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
|
outputs:
|
||||||
|
- output: pipelineArtifact
|
||||||
|
targetPath: $(Build.SourcesDirectory)
|
||||||
|
artifactName: "$(pythonVersion)_OSX_X64"
|
||||||
steps:
|
steps:
|
||||||
- template: pack/templates/nix_env_gen.yml
|
- template: ../../../../pack/templates/nix_env_gen.yml
|
||||||
parameters:
|
parameters:
|
||||||
pythonVersion: '$(pythonVersion)'
|
pythonVersion: '$(pythonVersion)'
|
||||||
workerPath: '$(workerPath)'
|
workerPath: '$(workerPath)'
|
||||||
artifactName: '$(pythonVersion)_OSX_X64'
|
artifactName: '$(pythonVersion)_OSX_X64'
|
||||||
- job: Build_OSX_ARM64
|
- job: Build_OSX_ARM64
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'macOS-latest'
|
name: Azure Pipelines
|
||||||
|
image: macOS-latest
|
||||||
|
os: macOS
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
Python39V4:
|
Python39V4:
|
||||||
pythonVersion: '3.9'
|
pythonVersion: '3.9'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python310V4:
|
Python310V4:
|
||||||
pythonVersion: '3.10'
|
pythonVersion: '3.10'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
Python311V4:
|
Python311V4:
|
||||||
pythonVersion: '3.11'
|
pythonVersion: '3.11'
|
||||||
workerPath: $(PROD_V4_WORKER_PY)
|
workerPath: 'python/prodV4/worker.py'
|
||||||
|
templateContext:
|
||||||
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
|
outputs:
|
||||||
|
- output: pipelineArtifact
|
||||||
|
targetPath: $(Build.SourcesDirectory)
|
||||||
|
artifactName: "$(pythonVersion)_OSX_ARM4"
|
||||||
steps:
|
steps:
|
||||||
- template: pack/templates/macos_64_env_gen.yml
|
- template: ../../../../pack/templates/macos_64_env_gen.yml
|
||||||
parameters:
|
parameters:
|
||||||
pythonVersion: '$(pythonVersion)'
|
pythonVersion: '$(pythonVersion)'
|
||||||
workerPath: '$(workerPath)'
|
workerPath: '$(workerPath)'
|
||||||
|
@ -145,11 +168,17 @@ jobs:
|
||||||
|
|
||||||
- job: PackageWorkers
|
- job: PackageWorkers
|
||||||
dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64']
|
dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64']
|
||||||
condition: or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), eq(variables['Build.SourceBranch'], 'refs/heads/dev'), eq(variables['GeneratePackage'], True))
|
templateContext:
|
||||||
pool:
|
outputParentDirectory: $(Build.ArtifactStagingDirectory)
|
||||||
name: '1ES-Hosted-AzFunc'
|
outputs:
|
||||||
demands:
|
- output: nuget
|
||||||
- ImageOverride -equals MMS2019TLS
|
condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/dev'), eq(variables['UPLOADPACKAGETOPRERELEASEFEED'], true))
|
||||||
|
useDotNetTask: false
|
||||||
|
packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg'
|
||||||
|
packageParentPath: "$(Build.ArtifactStagingDirectory)"
|
||||||
|
publishVstsFeed: "e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df"
|
||||||
|
nuGetFeedType: "internal"
|
||||||
|
allowPackageConflicts: true
|
||||||
steps:
|
steps:
|
||||||
- bash: |
|
- bash: |
|
||||||
echo "Releasing from $BUILD_SOURCEBRANCHNAME"
|
echo "Releasing from $BUILD_SOURCEBRANCHNAME"
|
||||||
|
@ -165,7 +194,7 @@ jobs:
|
||||||
echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME"
|
echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME"
|
||||||
VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g")
|
VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g")
|
||||||
NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec"
|
NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec"
|
||||||
WKVERSION="$VERSION-$(patchBuildNumberForDev)"
|
WKVERSION="$VERSION-$(Build.BuildNumber)"
|
||||||
else
|
else
|
||||||
# this is only to test nuget related workflow because we are setting nuspec here
|
# this is only to test nuget related workflow because we are setting nuspec here
|
||||||
echo "Generating Integration Test Package for $BUILD_SOURCEBRANCHNAME for testing purpose"
|
echo "Generating Integration Test Package for $BUILD_SOURCEBRANCHNAME for testing purpose"
|
||||||
|
@@ -179,11 +208,10 @@ jobs:
       echo "##vso[task.setvariable variable=nuspec_path]$NUSPEC"
       echo "##vso[task.setvariable variable=worker_version]$WKVERSION"
     displayName: "Generate Worker NuGet Package for Release $BUILD_SOURCEBRANCHNAME"
-  - task: DownloadBuildArtifacts@0
+  - task: DownloadPipelineArtifact@2
     inputs:
       buildType: 'current'
-      downloadType: 'specific'
-      downloadPath: '$(Build.SourcesDirectory)'
+      targetPath: '$(Build.SourcesDirectory)'
   - task: ManifestGeneratorTask@0
     displayName: 'SBOM Generation Task'
     inputs:

@@ -202,16 +230,3 @@ jobs:
       packDestination: $(Build.ArtifactStagingDirectory)
       versioningScheme: 'byEnvVar'
       versionEnvVar: WORKER_VERSION
-  - task: PublishBuildArtifacts@1
-    inputs:
-      pathtoPublish: '$(Build.ArtifactStagingDirectory)'
-      artifactName: 'PythonWorker'
-  - task: NuGetCommand@2
-    condition: eq(variables['UPLOADPACKAGETOPRERELEASEFEED'], true)
-    inputs:
-      command: 'push'
-      packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg'
-      nuGetFeedType: 'internal'
-      publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df'
-      allowPackageConflicts: true
-    displayName: '[Integration Test] Push NuGet package to the AzureFunctionsPreRelease feed'

@@ -0,0 +1,35 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python Core Tools E2E Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    steps:
+      - task: UsePythonVersion@0
+        displayName: 'Install Python'
+        inputs:
+          versionSpec: "3.10"
+          addToPath: true
+      - task: UseDotNet@2
+        displayName: 'Install DotNet 3'
+        inputs:
+          packageType: 'sdk'
+          version: "3.1.x"
+      - task: UseDotNet@2
+        displayName: 'Install DotNet 6'
+        inputs:
+          packageType: 'sdk'
+          version: "6.x"
+      - pwsh: '$(Build.SourcesDirectory)/.ci/e2e_integration_test/start-e2e.ps1'
+        env:
+          AzureWebJobsStorage: $(LinuxStorageConnectionString311)
+          AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311)
+          AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311)
+          AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311)
+          AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311)
+          AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311)
+          AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311)
+        displayName: 'Running Python Language Worker E2E Tests'

@@ -0,0 +1,37 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python Docker Custom Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    steps:
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(CUSTOM_PYTHON_VERSION)
+      - bash: |
+          python -m pip install -U -e .[dev]
+          if [[ $(PYTHON_VERSION) != "3.7" ]]; then
+            python -m pip install --pre -U -e .[test-http-v2]
+          fi
+          if [[ $(PYTHON_VERSION) != "3.7" && $(PYTHON_VERSION) != "3.8" ]]; then
+            python -m pip install --pre -U -e .[test-deferred-bindings]
+          fi
+          python setup.py build
+        displayName: 'Install dependencies'
+      - bash: |
+          python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests
+        env:
+          DEDICATED_DOCKER_TEST: $(CUSTOM_DED_IMAGE)
+          CONSUMPTION_DOCKER_TEST: $(CUSTOM_CON_IMAGE)
+          IMAGE_NAME: $(CUSTOM_IMAGE_NAME)
+          AzureWebJobsStorage: $(LinuxStorageConnectionString311)
+          AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311)
+          AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311)
+          AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311)
+          AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311)
+          AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311)
+          AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311)
+        displayName: "Running Python DockerCustom tests"

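The install step above gates the optional test extras by interpreter version: .[test-http-v2] is skipped on 3.7, and .[test-deferred-bindings] on 3.7 and 3.8. A minimal Python sketch of the same gating; the 3.8+/3.9+ floors are inferred from these shell checks, not from package metadata:

    import sys

    # Mirror of the bash version gates in the install step above.
    extras = ["dev"]
    if sys.version_info[:2] >= (3, 8):   # .[test-http-v2] is skipped on 3.7
        extras.append("test-http-v2")
    if sys.version_info[:2] >= (3, 9):   # .[test-deferred-bindings] is skipped on 3.7/3.8
        extras.append("test-deferred-bindings")
    print("pip install -e .[" + ",".join(extras) + "]")
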
@@ -0,0 +1,72 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python Docker Consumption Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    strategy:
+      matrix:
+        Python38:
+          PYTHON_VERSION: '3.8'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString38)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38)
+          SQL_CONNECTION: $(LinuxSqlConnectionString38)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString38)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38)
+        Python39:
+          PYTHON_VERSION: '3.9'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
+          SQL_CONNECTION: $(LinuxSqlConnectionString39)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
+        Python310:
+          PYTHON_VERSION: '3.10'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
+          SQL_CONNECTION: $(LinuxSqlConnectionString310)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
+        Python311:
+          PYTHON_VERSION: '3.11'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
+          SQL_CONNECTION: $(LinuxSqlConnectionString311)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
+
+    steps:
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(PYTHON_VERSION)
+      - bash: |
+          python -m pip install -U -e .[dev]
+          python -m pip install --pre -U -e .[test-http-v2]
+          if [[ $(PYTHON_VERSION) != "3.8" ]]; then
+            python -m pip install --pre -U -e .[test-deferred-bindings]
+          fi
+          python setup.py build
+        displayName: 'Install dependencies'
+      - bash: |
+          python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests
+        env:
+          CONSUMPTION_DOCKER_TEST: "true"
+          AzureWebJobsStorage: $(STORAGE_CONNECTION)
+          AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
+          AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
+          AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+          AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
+          AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
+          AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
+        displayName: "Running $(PYTHON_VERSION) Docker Consumption tests"

@@ -0,0 +1,72 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python Docker Dedicated Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    strategy:
+      matrix:
+        Python38:
+          PYTHON_VERSION: '3.8'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString38)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38)
+          SQL_CONNECTION: $(LinuxSqlConnectionString38)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString38)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38)
+        Python39:
+          PYTHON_VERSION: '3.9'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
+          SQL_CONNECTION: $(LinuxSqlConnectionString39)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
+        Python310:
+          PYTHON_VERSION: '3.10'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
+          SQL_CONNECTION: $(LinuxSqlConnectionString310)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
+        Python311:
+          PYTHON_VERSION: '3.11'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
+          SQL_CONNECTION: $(LinuxSqlConnectionString311)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
+
+    steps:
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(PYTHON_VERSION)
+      - bash: |
+          python -m pip install -U -e .[dev]
+          python -m pip install --pre -U -e .[test-http-v2]
+          if [[ $(PYTHON_VERSION) != "3.8" ]]; then
+            python -m pip install --pre -U -e .[test-deferred-bindings]
+          fi
+          python setup.py build
+        displayName: 'Install dependencies'
+      - bash: |
+          python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests
+        env:
+          DEDICATED_DOCKER_TEST: "true"
+          AzureWebJobsStorage: $(STORAGE_CONNECTION)
+          AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
+          AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
+          AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+          AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
+          AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
+          AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
+        displayName: "Running $(PYTHON_VERSION) Docker Dedicated tests"

@@ -0,0 +1,92 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python E2E Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    strategy:
+      matrix:
+        Python37:
+          PYTHON_VERSION: '3.7'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString37)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString37)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString37)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString37)
+          SQL_CONNECTION: $(LinuxSqlConnectionString37)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString37)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString37)
+        Python38:
+          PYTHON_VERSION: '3.8'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString38)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38)
+          SQL_CONNECTION: $(LinuxSqlConnectionString38)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString38)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38)
+        Python39:
+          PYTHON_VERSION: '3.9'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39)
+          SQL_CONNECTION: $(LinuxSqlConnectionString39)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString39)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39)
+        Python310:
+          PYTHON_VERSION: '3.10'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310)
+          SQL_CONNECTION: $(LinuxSqlConnectionString310)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString310)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310)
+        Python311:
+          PYTHON_VERSION: '3.11'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
+          COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311)
+          EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311)
+          SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311)
+          SQL_CONNECTION: $(LinuxSqlConnectionString311)
+          EVENTGRID_URI: $(LinuxEventGridTopicUriString311)
+          EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311)
+    steps:
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(PYTHON_VERSION)
+      - task: UseDotNet@2
+        displayName: 'Install .NET 8'
+        inputs:
+          version: 8.0.x
+      - bash: |
+          python -m pip install --upgrade pip
+          python -m pip install -U azure-functions --pre
+          python -m pip install -U -e .[dev]
+
+          if [[ $(PYTHON_VERSION) != "3.7" ]]; then
+            python -m pip install --pre -U -e .[test-http-v2]
+          fi
+          if [[ $(PYTHON_VERSION) != "3.7" && $(PYTHON_VERSION) != "3.8" ]]; then
+            python -m pip install --pre -U -e .[test-deferred-bindings]
+          fi
+
+          python setup.py build
+          python setup.py webhost --branch-name=dev
+          python setup.py extension
+          mkdir logs
+        displayName: 'Install dependencies and the worker'
+      - bash: |
+          python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests
+        env:
+          AzureWebJobsStorage: $(STORAGE_CONNECTION)
+          AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION)
+          AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION)
+          AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION)
+          AzureWebJobsSqlConnectionString: $(SQL_CONNECTION)
+          AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI)
+          AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION)
+        displayName: "Running $(PYTHON_VERSION) Python E2E Tests"

@@ -0,0 +1,48 @@
+jobs:
+  - job: "TestPython"
+    displayName: "Run Python Linux Consumption Tests"
+
+    pool:
+      name: 1es-pool-azfunc
+      image: 1es-ubuntu-22.04
+      os: linux
+
+    strategy:
+      matrix:
+        Python37:
+          PYTHON_VERSION: '3.7'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString37)
+        Python38:
+          PYTHON_VERSION: '3.8'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString38)
+        Python39:
+          PYTHON_VERSION: '3.9'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString39)
+        Python310:
+          PYTHON_VERSION: '3.10'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString310)
+        Python311:
+          PYTHON_VERSION: '3.11'
+          STORAGE_CONNECTION: $(LinuxStorageConnectionString311)
+
+    steps:
+      - task: UsePythonVersion@0
+        inputs:
+          versionSpec: $(PYTHON_VERSION)
+      - bash: |
+          python -m pip install --upgrade pip
+          python -m pip install -U azure-functions --pre
+          python -m pip install -U -e .[dev]
+
+          if [[ $(PYTHON_VERSION) != "3.7" ]]; then
+            python -m pip install --pre -U -e .[test-http-v2]
+          fi
+
+          python setup.py build
+        displayName: 'Install dependencies and the worker'
+      - bash: |
+          python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
+        env:
+          AzureWebJobsStorage: $(STORAGE_CONNECTION)
+          _DUMMY_CONT_KEY: $(_DUMMY_CONT_KEY)
+        displayName: "Running $(PYTHON_VERSION) Linux Consumption tests"

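The pytest invocations in these jobs rely on plugin-provided flags: -n/--dist come from pytest-xdist, --reruns from pytest-rerunfailures, and --instafail from pytest-instafail. That those plugins arrive via the .[dev] extra is an assumption; the extra's full contents are not shown in this diff. A sketch of the same run through pytest's documented programmatic entry point:

    import pytest

    # Equivalent of the consumption-test step above, run in-process.
    raise SystemExit(pytest.main([
        "-n", "auto",          # pytest-xdist: one worker per available CPU
        "--dist", "loadfile",  # keep all tests from one file on one worker
        "-vv",
        "--reruns", "4",       # pytest-rerunfailures: retry flaky e2e tests
        "--instafail",         # pytest-instafail: print failures immediately
        "tests/consumption_tests",
    ]))
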
@@ -26,7 +26,3 @@ steps:
       !grpc_tools/**/*
       !grpcio_tools*/*
     targetFolder: '$(Build.ArtifactStagingDirectory)'
-- task: PublishBuildArtifacts@1
-  inputs:
-    pathtoPublish: '$(Build.ArtifactStagingDirectory)'
-    artifactName: ${{ parameters.artifactName }}

@@ -26,7 +26,3 @@ steps:
       !grpc_tools/**/*
       !grpcio_tools*/*
     targetFolder: '$(Build.ArtifactStagingDirectory)'
-- task: PublishBuildArtifacts@1
-  inputs:
-    pathtoPublish: '$(Build.ArtifactStagingDirectory)'
-    artifactName: ${{ parameters.artifactName }}

@@ -26,7 +26,3 @@ steps:
       !grpc_tools\**\*
       !grpcio_tools*\*
     targetFolder: '$(Build.ArtifactStagingDirectory)'
-- task: PublishBuildArtifacts@1
-  inputs:
-    pathtoPublish: '$(Build.ArtifactStagingDirectory)'
-    artifactName: ${{ parameters.artifactName }}

setup.py

@@ -71,7 +71,7 @@ PACKAGES = [
     "azure_functions_worker._thirdparty",
 ]

-INSTALL_REQUIRES = ["azure-functions==1.20.0b2", "python-dateutil~=2.8.2"]
+INSTALL_REQUIRES = ["azure-functions==1.20.0", "python-dateutil~=2.8.2"]

 if sys.version_info[:2] == (3, 7):
     INSTALL_REQUIRES.extend(

@@ -93,7 +93,7 @@ EXTRA_REQUIRES = {
     "pycryptodome~=3.10.1",
     "flake8~=4.0.1",
     "mypy",
-    "pytest",
+    "pytest~=7.4.4",
     "requests==2.*",
     "coverage",
     "pytest-sugar",

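The new pytest pin is a compatible-release specifier: ~=7.4.4 accepts 7.4.x patch releases but rejects 7.5 and above. A quick check using the packaging library (the library itself is an assumption here, not part of this diff):

    from packaging.specifiers import SpecifierSet

    spec = SpecifierSet("~=7.4.4")
    print("7.4.5" in spec)  # True  - patch upgrades stay allowed
    print("7.5.0" in spec)  # False - minor version bumps are excluded
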
@@ -1,6 +1,7 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
 import azure.functions as func

 import logging

 app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)

@@ -45,7 +46,7 @@ def mytimer(mytimer: func.TimerRequest, testEntity) -> None:
     logging.info("This timer trigger function executed successfully")


-@app.function_name(name="mytimer2")
+@app.function_name(name="return_string")
 @app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
               run_on_startup=False,
               use_monitor=False)

@@ -54,5 +55,90 @@ def mytimer(mytimer: func.TimerRequest, testEntity) -> None:
     type="table",
     connection="AzureWebJobsStorage",
     table_name="EventHubBatchTest")
-def mytimer2(mytimer: func.TimerRequest, testEntity):
-    logging.info("Timer trigger with none return and no type hint")
+def return_string(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return string")
+    return "hi!"
+
+
+@app.function_name(name="return_bytes")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_bytes(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return bytes")
+    return "test-dată"
+
+
+@app.function_name(name="return_dict")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_dict(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return dict")
+    return {"hello": "world"}
+
+
+@app.function_name(name="return_list")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_list(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return list")
+    return [1, 2, 3]
+
+
+@app.function_name(name="return_int")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_int(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return int")
+    return 12
+
+
+@app.function_name(name="return_double")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_double(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return double")
+    return 12.34
+
+
+@app.function_name(name="return_bool")
+@app.schedule(schedule="*/1 * * * * *", arg_name="mytimer",
+              run_on_startup=False,
+              use_monitor=False)
+@app.generic_input_binding(
+    arg_name="testEntity",
+    type="table",
+    connection="AzureWebJobsStorage",
+    table_name="EventHubBatchTest")
+def return_bool(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return bool")
+    return True

@@ -0,0 +1,21 @@
(The commit adds the following function.json once for each of the seven new v1-model timer functions; the seven copies are identical, so it is shown here once.)
+{
+  "scriptFile": "main.py",
+  "bindings": [
+    {
+      "name": "mytimer",
+      "type": "timerTrigger",
+      "direction": "in",
+      "schedule": "*/1 * * * * *",
+      "runOnStartup": false
+    },
+    {
+      "direction": "in",
+      "type": "table",
+      "name": "testEntity",
+      "partitionKey": "test",
+      "rowKey": "WillBePopulatedWithGuid",
+      "tableName": "BindingTestTable",
+      "connection": "AzureWebJobsStorage"
+    }
+  ]
+}

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return bool")
+    return True

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return bytes")
+    return "test-dată"

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return dict")
+    return {"hello": "world"}

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return double")
+    return 12.34

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return int")
+    return 12

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return list")
+    return [1, 2, 3]

@@ -0,0 +1,11 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+import logging
+
+import azure.functions as func
+
+
+def main(mytimer: func.TimerRequest, testEntity):
+    logging.info("Return string")
+    return "hi!"

@@ -28,7 +28,7 @@ def dotenv_func(req: func.HttpRequest) -> func.HttpResponse:
 def numpy_func(req: func.HttpRequest) -> func.HttpResponse:
     logging.info('Python HTTP trigger function processed a request.')

-    res = "array: {}".format(np.array([1, 2], dtype=complex))
+    res = "numpy version: {}".format(np.__version__)

     return func.HttpResponse(res)

@@ -9,6 +9,6 @@ import numpy as np
 def main(req: func.HttpRequest) -> func.HttpResponse:
     logging.info('Python HTTP trigger function processed a request.')

-    res = "array: {}".format(np.array([1, 2], dtype=complex))
+    res = "numpy version: {}".format(np.__version__)

     return func.HttpResponse(res)

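The motivation for this switch appears to be output stability: the textual repr of a numpy array (spacing, sign formatting) has shifted between numpy releases, while the version string is deterministic for a given install. A small illustration; the exact old output depended on which numpy was installed:

    import numpy as np

    # Old probe: exact text depends on numpy's array-repr rules, which have
    # changed across releases (e.g. spacing around "1.+0.j").
    print("array: {}".format(np.array([1, 2], dtype=complex)))

    # New probe: stable for a given environment, so the test can assert on
    # the "numpy version" prefix instead of a hard-coded repr.
    print("numpy version: {}".format(np.__version__))
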
@@ -44,25 +44,33 @@ class TestGenericFunctions(testutils.WebHostTestCase):
         r = self.webhost.request('GET', 'return_not_processed_last')
         self.assertEqual(r.status_code, 200)

-    def test_return_none(self):
-        time.sleep(1)
-        # Checking webhost status.
-        r = self.webhost.request('GET', '', no_prefix=True,
-                                 timeout=5)
-        self.assertTrue(r.ok)
-
-    def test_return_none_no_type_hint(self):
-        time.sleep(1)
-        # Checking webhost status.
-        r = self.webhost.request('GET', '', no_prefix=True,
-                                 timeout=5)
-        self.assertTrue(r.ok)
-
-    def check_log_timer(self, host_out: typing.List[str]):
-        self.assertEqual(host_out.count("This timer trigger function executed "
-                                        "successfully"), 1)
-        self.assertEqual(host_out.count("Timer trigger with none return "
-                                        "and no type hint"), 1)
+    def test_return_types(self):
+        # Checking that the function app is okay
+        time.sleep(10)
+        # Checking webhost status.
+        r = self.webhost.request('GET', '', no_prefix=True,
+                                 timeout=5)
+        self.assertTrue(r.ok)
+
+    def check_log_return_types(self, host_out: typing.List[str]):
+        # Checks that functions executed correctly
+        self.assertIn("This timer trigger function executed "
+                      "successfully", host_out)
+        self.assertIn("Return string", host_out)
+        self.assertIn("Return bytes", host_out)
+        self.assertIn("Return dict", host_out)
+        self.assertIn("Return list", host_out)
+        self.assertIn("Return int", host_out)
+        self.assertIn("Return double", host_out)
+        self.assertIn("Return bool", host_out)
+
+        # Checks for failed executions (TypeErrors, etc.)
+        errors_found = False
+        for log in host_out:
+            if "Exception" in log:
+                errors_found = True
+                break
+        self.assertFalse(errors_found)


 @skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)

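The flag-and-break loop in check_log_return_types can be stated more idiomatically with any(); an equivalent sketch, not part of the commit (host_out and self come from the surrounding test class):

    # Equivalent to the errors_found loop above:
    errors_found = any("Exception" in log for log in host_out)
    self.assertFalse(errors_found)
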
@@ -1,11 +1,7 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
-import concurrent
 import os
-import sys
 import typing
-import unittest
-from concurrent.futures import ThreadPoolExecutor
 from unittest.mock import patch

 import requests

@@ -155,9 +151,7 @@ class TestCommonLibsHttpFunctions(testutils.WebHostTestCase):
         r = self.webhost.request('GET', 'numpy_func',
                                  timeout=REQUEST_TIMEOUT_SEC)

-        res = "array: [1.+0.j 2.+0.j]"
-
-        self.assertEqual(r.content.decode("UTF-8"), res)
+        self.assertIn("numpy version", r.content.decode("UTF-8"))

     def test_requests(self):
         r = self.webhost.request('GET', 'requests_func',

@@ -214,170 +208,18 @@ class TestHttpFunctionsWithInitIndexing(TestHttpFunctions):
     @classmethod
     def setUpClass(cls):
+        cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1'
         os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1"
         super().setUpClass()

     @classmethod
     def tearDownClass(cls):
-        # Remove the PYTHON_SCRIPT_FILE_NAME environment variable
         os.environ.pop(PYTHON_ENABLE_INIT_INDEXING)
         super().tearDownClass()

-
-@unittest.skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7")
-class TestHttpFunctionsV2FastApiWithInitIndexing(
-        TestHttpFunctionsWithInitIndexing):
     @classmethod
-    def get_script_dir(cls):
-        return testutils.E2E_TESTS_FOLDER / 'http_functions' / \
-            'http_functions_v2' / \
-            'fastapi'
+    def get_environment_variables(cls):
+        return cls.env_variables
-
-    (The eight FastAPI streaming/response tests defined on this class —
-    test_return_streaming, test_return_streaming_concurrently,
-    test_return_html, test_return_ujson, test_return_orjson,
-    test_return_file, test_upload_data_stream and
-    test_upload_data_stream_concurrently — are removed here and reappear
-    verbatim in the new HTTP v2 test module added below.)

 class TestUserThreadLoggingHttpFunctions(testutils.WebHostTestCase):

@@ -22,11 +22,12 @@ class TestWorkerProcessCount(testutils.WebHostTestCase):
         super().setUpClass()

-    def tearDown(self):
+    @classmethod
+    def tearDownClass(cls):
         os.environ.pop('PYTHON_THREADPOOL_THREAD_COUNT')
         os.environ.pop('FUNCTIONS_WORKER_PROCESS_COUNT')

-        super().tearDown()
+        super().tearDownClass()

     @classmethod
     def get_script_dir(cls):

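The move from per-test tearDown to tearDownClass matters because setUpClass sets these variables once per class: with per-test teardown, the second test's cleanup would pop an already-removed key. A minimal illustration of the failure mode being fixed:

    import os

    os.environ["FUNCTIONS_WORKER_PROCESS_COUNT"] = "1"    # set once, as in setUpClass
    os.environ.pop("FUNCTIONS_WORKER_PROCESS_COUNT")      # first cleanup succeeds
    try:
        os.environ.pop("FUNCTIONS_WORKER_PROCESS_COUNT")  # second per-test cleanup raises
    except KeyError:
        print("KeyError: variable was already removed")
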
@@ -251,13 +251,13 @@ def put_blob_bytes(req: func.HttpRequest, file: func.Out[bytes]) -> str:

 @app.function_name(name="blob_cache")
-@app.blob_input(arg_name="client",
+@app.blob_input(arg_name="cachedClient",
                 path="python-worker-tests/test-blobclient-triggered.txt",
                 connection="AzureWebJobsStorage")
 @app.route(route="blob_cache")
 def blob_cache(req: func.HttpRequest,
-               client: blob.BlobClient) -> str:
-    return client.download_blob(encoding='utf-8').readall()
+               cachedClient: blob.BlobClient) -> str:
+    return cachedClient.download_blob(encoding='utf-8').readall()


 @app.function_name(name="aio_blob_client")

@@ -17,6 +17,10 @@ class TestDeferredBindingsBlobFunctions(testutils.WebHostTestCase):
         return testutils.EXTENSION_TESTS_FOLDER / 'deferred_bindings_tests' / \
             'deferred_bindings_blob_functions'

+    @classmethod
+    def get_libraries_to_install(cls):
+        return ['azurefunctions-extensions-bindings-blob']
+
     def test_blob_str(self):
         r = self.webhost.request('POST', 'put_blob_str', data='test-data')
         self.assertEqual(r.status_code, 200)

@@ -0,0 +1,189 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+import concurrent
+import os
+import sys
+import unittest
+from concurrent.futures import ThreadPoolExecutor
+
+import requests
+
+from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING
+from tests.utils import testutils
+
+REQUEST_TIMEOUT_SEC = 5
+
+
+@unittest.skipIf(sys.version_info.minor < 8, "HTTPv2 "
+                 "is only supported for 3.8+.")
+class TestHttpFunctionsWithInitIndexing(testutils.WebHostTestCase):
+    @classmethod
+    def setUpClass(cls):
+        cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1'
+        os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1"
+        super().setUpClass()
+
+    @classmethod
+    def tearDownClass(cls):
+        os.environ.pop(PYTHON_ENABLE_INIT_INDEXING)
+        super().tearDownClass()
+
+    @classmethod
+    def get_environment_variables(cls):
+        return cls.env_variables
+
+    @classmethod
+    def get_script_dir(cls):
+        return testutils.EXTENSION_TESTS_FOLDER / 'http_v2_tests' / \
+            'http_functions_v2' / \
+            'fastapi'
+
+    @classmethod
+    def get_libraries_to_install(cls):
+        return ['azurefunctions-extensions-http-fastapi', 'orjson', 'ujson']
+
+    @testutils.retryable_test(3, 5)
+    def test_return_streaming(self):
+        """Test if the return_streaming function returns a streaming
+        response"""
+        root_url = self.webhost._addr
+        streaming_url = f'{root_url}/api/return_streaming'
+        r = requests.get(
+            streaming_url, timeout=REQUEST_TIMEOUT_SEC, stream=True)
+        self.assertTrue(r.ok)
+        # Validate streaming content
+        expected_content = [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n']
+        received_content = []
+        for chunk in r.iter_content(chunk_size=5):
+            if chunk:
+                received_content.append(chunk)
+        self.assertEqual(received_content, expected_content)
+
+    @testutils.retryable_test(3, 5)
+    def test_return_streaming_concurrently(self):
+        """Test if the return_streaming function returns a streaming
+        response concurrently"""
+        root_url = self.webhost._addr
+        streaming_url = f'{root_url}/return_streaming'
+
+        # Function to make a streaming request and validate content
+        def make_request():
+            r = requests.get(streaming_url, timeout=REQUEST_TIMEOUT_SEC,
+                             stream=True)
+            self.assertTrue(r.ok)
+            expected_content = [b"First chunk\n", b"Second chunk\n"]
+            received_content = []
+            for chunk in r.iter_content(chunk_size=1024):
+                if chunk:
+                    received_content.append(chunk)
+            self.assertEqual(received_content, expected_content)
+
+        # Make concurrent requests
+        with ThreadPoolExecutor(max_workers=2) as executor:
+            executor.map(make_request, range(2))
+
+    @testutils.retryable_test(3, 5)
+    def test_return_html(self):
+        """Test if the return_html function returns an HTML response"""
+        root_url = self.webhost._addr
+        html_url = f'{root_url}/api/return_html'
+        r = requests.get(html_url, timeout=REQUEST_TIMEOUT_SEC)
+        self.assertTrue(r.ok)
+        self.assertEqual(r.headers['content-type'],
+                         'text/html; charset=utf-8')
+        # Validate HTML content
+        expected_html = "<html><body><h1>Hello, World!</h1></body></html>"
+        self.assertEqual(r.text, expected_html)
+
+    @testutils.retryable_test(3, 5)
+    def test_return_ujson(self):
+        """Test if the return_ujson function returns a UJSON response"""
+        root_url = self.webhost._addr
+        ujson_url = f'{root_url}/api/return_ujson'
+        r = requests.get(ujson_url, timeout=REQUEST_TIMEOUT_SEC)
+        self.assertTrue(r.ok)
+        self.assertEqual(r.headers['content-type'], 'application/json')
+        self.assertEqual(r.text, '{"message":"Hello, World!"}')
+
+    @testutils.retryable_test(3, 5)
+    def test_return_orjson(self):
+        """Test if the return_orjson function returns an ORJSON response"""
+        root_url = self.webhost._addr
+        orjson_url = f'{root_url}/api/return_orjson'
+        r = requests.get(orjson_url, timeout=REQUEST_TIMEOUT_SEC)
+        self.assertTrue(r.ok)
+        self.assertEqual(r.headers['content-type'], 'application/json')
+        self.assertEqual(r.text, '{"message":"Hello, World!"}')
+
+    @testutils.retryable_test(3, 5)
+    def test_return_file(self):
+        """Test if the return_file function returns a file response"""
+        root_url = self.webhost._addr
+        file_url = f'{root_url}/api/return_file'
+        r = requests.get(file_url, timeout=REQUEST_TIMEOUT_SEC)
+        self.assertTrue(r.ok)
+        self.assertIn('@app.route(route="default_template")', r.text)
+
+    @testutils.retryable_test(3, 5)
+    def test_upload_data_stream(self):
+        """Test if the upload_data_stream function receives streaming data
+        and returns the complete data"""
+        root_url = self.webhost._addr
+        upload_url = f'{root_url}/api/upload_data_stream'
+
+        # Define the streaming data
+        data_chunks = [b"First chunk\n", b"Second chunk\n"]
+
+        # Define a function to simulate streaming by reading from an
+        # iterator
+        def stream_data(data_chunks):
+            for chunk in data_chunks:
+                yield chunk
+
+        # Send a POST request with streaming data
+        r = requests.post(upload_url, data=stream_data(data_chunks))
+
+        # Assert that the request was successful
+        self.assertTrue(r.ok)
+
+        # Assert that the response content matches the concatenation of
+        # all data chunks
+        complete_data = b"".join(data_chunks)
+        self.assertEqual(r.content, complete_data)
+
+    @testutils.retryable_test(3, 5)
+    def test_upload_data_stream_concurrently(self):
+        """Test if the upload_data_stream function receives streaming data
+        and returns the complete data"""
+        root_url = self.webhost._addr
+        upload_url = f'{root_url}/api/upload_data_stream'
+
+        # Define the streaming data
+        data_chunks = [b"First chunk\n", b"Second chunk\n"]
+
+        # Define a function to simulate streaming by reading from an
+        # iterator
+        def stream_data(data_chunks):
+            for chunk in data_chunks:
+                yield chunk
+
+        # Define the number of concurrent requests
+        num_requests = 5
+
+        # Define a function to send a single request
+        def send_request():
+            r = requests.post(upload_url, data=stream_data(data_chunks))
+            return r.ok, r.content
+
+        # Send multiple requests concurrently
+        with concurrent.futures.ThreadPoolExecutor() as executor:
+            futures = [executor.submit(send_request) for _ in
+                       range(num_requests)]
+
+            # Assert that all requests were successful and the response
+            # contents are correct
+            for future in concurrent.futures.as_completed(futures):
+                ok, content = future.result()
+                self.assertTrue(ok)
+                complete_data = b"".join(data_chunks)
+                self.assertEqual(content, complete_data)

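Why test_return_streaming expects exactly those five chunks: iter_content(chunk_size=5) re-slices the 25-byte payload into 5-byte pieces on the client side, regardless of how the server framed the stream. A standalone check of the arithmetic:

    data = b"First chunk\nSecond chunk\n"
    chunks = [data[i:i + 5] for i in range(0, len(data), 5)]
    # -> [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n']
    assert chunks == [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n']
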
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"scriptFile": "main.py",
|
||||||
|
"bindings": [
|
||||||
|
{
|
||||||
|
"direction": "out",
|
||||||
|
"name": "$return",
|
||||||
|
"type": "foobar",
|
||||||
|
"dataType": "binary"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
# Licensed under the MIT License.
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
return "hello"
|
|
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"scriptFile": "main.py",
|
||||||
|
"bindings": [
|
||||||
|
{
|
||||||
|
"type": "foobar",
|
||||||
|
"name": "input",
|
||||||
|
"direction": "in",
|
||||||
|
"dataType": "string"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
# Licensed under the MIT License.
|
||||||
|
|
||||||
|
|
||||||
|
def main(input):
|
||||||
|
return True
|
|
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"scriptFile": "main.py",
|
||||||
|
"bindings": [
|
||||||
|
{
|
||||||
|
"type": "foobar",
|
||||||
|
"name": "input",
|
||||||
|
"direction": "in",
|
||||||
|
"dataType": "string"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
# Licensed under the MIT License.
|
||||||
|
|
||||||
|
|
||||||
|
def main(input):
|
||||||
|
return {"hello": "world"}
|
|
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"scriptFile": "main.py",
|
||||||
|
"bindings": [
|
||||||
|
{
|
||||||
|
"type": "foobar",
|
||||||
|
"name": "input",
|
||||||
|
"direction": "in",
|
||||||
|
"dataType": "string"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
# Licensed under the MIT License.
|
||||||
|
|
||||||
|
|
||||||
|
def main(input):
|
||||||
|
return 12.34
|
|
@@ -0,0 +1,11 @@
{
  "scriptFile": "main.py",
  "bindings": [
    {
      "type": "foobar",
      "name": "input",
      "direction": "in",
      "dataType": "string"
    }
  ]
}
@@ -0,0 +1,6 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.


def main(input):
    return 12
@@ -0,0 +1,11 @@
{
  "scriptFile": "main.py",
  "bindings": [
    {
      "type": "foobar",
      "name": "input",
      "direction": "in",
      "dataType": "string"
    }
  ]
}
@@ -0,0 +1,6 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.


def main(input):
    return [1, 2, 3]
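Editor's note: these six function.json/main.py pairs appear to be the fixtures for the new generic-binding tests further down ("hello", bool, dict, double, int, and list returns). As a rough illustration of the conversion those tests assert, a hypothetical mapping function might look like the sketch below; this is my inference from the assertions, not the worker's actual implementation.

import json
from typing import Any


def to_typed_data(value: Any) -> dict:
    """Hypothetical sketch of how a generic binding's return value
    maps onto protobuf TypedData fields."""
    if isinstance(value, bool):
        # bool before int: bool is an int subclass, and the tests
        # expect True to come back as int=1
        return {"int": int(value)}
    if isinstance(value, int):
        return {"int": value}
    if isinstance(value, float):
        return {"double": value}
    if isinstance(value, str):
        return {"string": value}
    if isinstance(value, (dict, list)):
        return {"json": json.dumps(value)}
    raise TypeError(f"unsupported return type: {type(value).__name__}")


# Matches the expectations asserted in the new tests below:
assert to_typed_data(True) == {"int": 1}
assert to_typed_data(12) == {"int": 12}
assert to_typed_data(12.34) == {"double": 12.34}
assert to_typed_data("hello") == {"string": "hello"}
assert to_typed_data([1, 2, 3]) == {"json": "[1, 2, 3]"}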
@@ -94,6 +94,7 @@ class TestHttpFunctionsV2FastApi(testutils.WebHostTestCase):
        self.assertIn('hello info', host_out)
        self.assertIn('and another error', host_out)

+    @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO")
    def test_debug_logging(self):
        r = self.webhost.request('GET', 'debug_logging')
        self.assertEqual(r.status_code, 200)
@@ -105,6 +106,7 @@ class TestHttpFunctionsV2FastApi(testutils.WebHostTestCase):
        self.assertIn('logging error', host_out)
        self.assertNotIn('logging debug', host_out)

+    @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO")
    def test_debug_with_user_logging(self):
        r = self.webhost.request('GET', 'debug_user_logging')
        self.assertEqual(r.status_code, 200)
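Editor's note: the guard on the decorators above is worth a second look, since sys.version_info.minor >= 7 is true on every Python 3.7+ interpreter, so these two tests end up skipped on all versions the worker currently supports. A quick illustrative check (not part of the diff):

import sys

# True on CPython 3.7 through 3.13, so the decorated tests always skip
print(sys.version_info.minor >= 7)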
@@ -6,6 +6,7 @@ import pathlib
import subprocess
import sys
import textwrap
+from unittest import skipIf
from unittest.mock import Mock, patch

from azure.functions import Function
@@ -205,6 +206,7 @@ class TestLoader(testutils.WebHostTestCase):

class TestPluginLoader(testutils.AsyncTestCase):

+    @skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7")
    async def test_entry_point_plugin(self):
        test_binding = pathlib.Path(__file__).parent / 'test-binding'
        subprocess.run([
@@ -56,5 +56,5 @@ class TestLogging(unittest.TestCase):
        self.assertIn("call1", processed_exception)
        self.assertIn("call2", processed_exception)
        self.assertIn("f", processed_exception)
-        self.assertIn("tests/unittests/test_logging.py",
-                      processed_exception)
+        self.assertRegex(processed_exception,
+                         r".*tests\\unittests\\test_logging.py.*")
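Editor's note: one detail that is easy to misread in the new assertion is that, inside the raw string, the regex escape \\ matches a single literal backslash, so the pattern only matches Windows-style path separators. A small illustrative check (my example, not from the commit):

import re

# r"\\" in the pattern matches one literal backslash, i.e. a Windows
# path separator; a forward-slash path does not match.
windows_path = r"C:\proj\tests\unittests\test_logging.py"
assert re.search(r".*tests\\unittests\\test_logging.py.*", windows_path)
assert not re.search(r".*tests\\unittests\\test_logging.py.*",
                     "proj/tests/unittests/test_logging.py")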
@@ -173,6 +173,9 @@ class TestGenericFunctions(testutils.AsyncTestCase):
            # implicitly
            self.assertEqual(r.response.result.status,
                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(bytes=b'\x00\x01'))

    async def test_mock_generic_implicit_output_exemption(self):
        async with testutils.start_mockhost(
@@ -223,3 +226,164 @@ class TestGenericFunctions(testutils.AsyncTestCase):
        self.assertEqual(
            r.response.return_value,
            protos.TypedData())
+
+    async def test_mock_generic_as_none(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_as_none')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_as_none', [
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(string="hello"))
+
+    async def test_mock_generic_return_dict(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_return_dict')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_return_dict', [
+                    protos.ParameterBinding(
+                        name='input',
+                        data=protos.TypedData(
+                            string='test'
+                        )
+                    )
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(json="{\"hello\": \"world\"}")
+            )
+
+    async def test_mock_generic_return_list(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_return_list')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_return_list', [
+                    protos.ParameterBinding(
+                        name='input',
+                        data=protos.TypedData(
+                            string='test'
+                        )
+                    )
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(json="[1, 2, 3]")
+            )
+
+    async def test_mock_generic_return_int(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_return_int')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_return_int', [
+                    protos.ParameterBinding(
+                        name='input',
+                        data=protos.TypedData(
+                            string='test'
+                        )
+                    )
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(int=12)
+            )
+
+    async def test_mock_generic_return_double(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_return_double')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_return_double', [
+                    protos.ParameterBinding(
+                        name='input',
+                        data=protos.TypedData(
+                            string='test'
+                        )
+                    )
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(double=12.34)
+            )
+
+    async def test_mock_generic_return_bool(self):
+        async with testutils.start_mockhost(
+                script_root=self.generic_funcs_dir) as host:
+
+            await host.init_worker("4.17.1")
+            func_id, r = await host.load_function('foobar_return_bool')
+
+            self.assertEqual(r.response.function_id, func_id)
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+
+            _, r = await host.invoke_function(
+                'foobar_return_bool', [
+                    protos.ParameterBinding(
+                        name='input',
+                        data=protos.TypedData(
+                            string='test'
+                        )
+                    )
+                ]
+            )
+            self.assertEqual(r.response.result.status,
+                             protos.StatusResult.Success)
+            self.assertEqual(
+                r.response.return_value,
+                protos.TypedData(int=1)
+            )
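Editor's note: the six new tests differ only in the function name, the parameter bindings, and the expected TypedData, so a shared helper could express the same coverage more compactly. A hedged refactoring sketch (my suggestion, not part of the commit):

    async def _assert_generic_return(self, host, func_name, expected,
                                     bindings=None):
        # Load, invoke, and compare the returned TypedData in one place.
        func_id, r = await host.load_function(func_name)
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        _, r = await host.invoke_function(func_name, bindings or [])
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(r.response.return_value, expected)

Each test would then reduce to a single call, for example: await self._assert_generic_return(host, 'foobar_return_int', protos.TypedData(int=12), bindings).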
@@ -132,21 +132,23 @@ class WebHostDockerContainerBase(unittest.TestCase):
        )

        function_path = "/home/site/wwwroot"
-        if configs.libraries:
-            install_libraries_cmd = []
-            install_libraries_cmd.extend(['pip', 'install'])
-            install_libraries_cmd.extend(configs.libraries)
-            install_libraries_cmd.extend(['-t',
-                                          f'{script_path}/{_libraries_path}'])
-
-            install_libraries_process = \
-                subprocess.run(args=install_libraries_cmd,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE)
-
-            if install_libraries_process.returncode != 0:
-                raise RuntimeError('Failed to install libraries')
+        configs.libraries = ((configs.libraries or [])
+                             + ['azurefunctions-extensions-base'])
+        install_libraries_cmd = []
+        install_libraries_cmd.extend(['pip', 'install'])
+        install_libraries_cmd.extend(['--platform=manylinux2014_x86_64'])
+        install_libraries_cmd.extend(configs.libraries)
+        install_libraries_cmd.extend(['-t',
+                                      f'{script_path}/{_libraries_path}'])
+        install_libraries_cmd.extend(['--only-binary=:all:'])
+
+        install_libraries_process = \
+            subprocess.run(args=install_libraries_cmd,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.PIPE)
+
+        if install_libraries_process.returncode != 0:
+            raise RuntimeError('Failed to install libraries')

        run_cmd = []
        run_cmd.extend([_docker_cmd, "run", "-p", "0:80", "-d"])
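Editor's note: the --only-binary=:all: flag appended above is what makes the --platform pin work, since pip generally refuses a --platform override unless it can restrict itself to prebuilt wheels. For reference, the list built by the new code is equivalent to the following (the target path here is hypothetical):

# Assembled pip invocation, shown flat for clarity
install_libraries_cmd = [
    'pip', 'install',
    '--platform=manylinux2014_x86_64',
    'azurefunctions-extensions-base',  # plus any configured libraries
    '-t', '/path/to/tests/.python_packages/lib/site-packages',  # hypothetical
    '--only-binary=:all:',
]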
@@ -27,7 +27,6 @@ _DOCKER_PATH = "DOCKER_PATH"
_DOCKER_DEFAULT_PATH = "docker"
_MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list"
_MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh"
-_DUMMY_CONT_KEY = "MDEyMzQ1Njc4OUFCQ0RFRjAxMjM0NTY3ODlBQkNERUY="
_FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \
                   "/archive/refs/heads/dev.zip"
_FUNC_FILE_NAME = "azure-functions-python-library-dev"
|
@ -198,7 +197,8 @@ class LinuxConsumptionWebHostController:
|
||||||
run_cmd.extend(["--cap-add", "SYS_ADMIN"])
|
run_cmd.extend(["--cap-add", "SYS_ADMIN"])
|
||||||
run_cmd.extend(["--device", "/dev/fuse"])
|
run_cmd.extend(["--device", "/dev/fuse"])
|
||||||
run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"])
|
run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"])
|
||||||
run_cmd.extend(["-e", f"CONTAINER_ENCRYPTION_KEY={_DUMMY_CONT_KEY}"])
|
run_cmd.extend(["-e",
|
||||||
|
f"CONTAINER_ENCRYPTION_KEY={os.getenv('_DUMMY_CONT_KEY')}"])
|
||||||
run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"])
|
run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"])
|
||||||
run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}'])
|
run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}'])
|
||||||
run_cmd.extend(["-v",
|
run_cmd.extend(["-v",
|
||||||
|
@ -266,7 +266,7 @@ class LinuxConsumptionWebHostController:
|
||||||
which expires in one day.
|
which expires in one day.
|
||||||
"""
|
"""
|
||||||
exp_ns = int(time.time() + 24 * 60 * 60) * 1000000000
|
exp_ns = int(time.time() + 24 * 60 * 60) * 1000000000
|
||||||
return cls._encrypt_context(_DUMMY_CONT_KEY, f'exp={exp_ns}')
|
return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), f'exp={exp_ns}')
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _get_site_encrypted_context(cls,
|
def _get_site_encrypted_context(cls,
|
||||||
|
@ -281,7 +281,7 @@ class LinuxConsumptionWebHostController:
|
||||||
|
|
||||||
# Ensure WEBSITE_SITE_NAME is set to simulate production mode
|
# Ensure WEBSITE_SITE_NAME is set to simulate production mode
|
||||||
ctx["Environment"]["WEBSITE_SITE_NAME"] = site_name
|
ctx["Environment"]["WEBSITE_SITE_NAME"] = site_name
|
||||||
return cls._encrypt_context(_DUMMY_CONT_KEY, json.dumps(ctx))
|
return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), json.dumps(ctx))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str:
|
def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str:
|
||||||
|
|
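Editor's note: with the hard-coded _DUMMY_CONT_KEY constant removed, every call site now depends on the _DUMMY_CONT_KEY environment variable, and os.getenv returns None when it is unset, which would only surface as a failure inside _encrypt_context. A small defensive sketch the call sites could share (my addition, not in the commit):

import os


def _require_cont_key() -> str:
    # Fail fast with a clear message instead of passing None into
    # _encrypt_context when the variable is missing.
    key = os.getenv('_DUMMY_CONT_KEY')
    if not key:
        raise RuntimeError(
            'Set the _DUMMY_CONT_KEY environment variable to run the '
            'Linux consumption tests')
    return key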