Docker tests updates (#1457)

* Docker tests updates
* Fixed consumption custom image handling when the value is empty
* Updated extension csproj to use .NET 8
* Updated extensions to include System.Drawing.Common
* Updated extensions to include System.Text.Json
* Added table extension
* Updated table & EventHub tests, unit tests, and lint
* EventHub test fixes
* Lint fixes

---------

Co-authored-by: hallvictoria <victoriahall@microsoft.com>
Parent: 4c8fb7bab9
Commit: 30789ed5ca
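
The "Fixed consumption custom image" bullet corresponds to the LinuxConsumptionWebHostController change at the bottom of this diff: the controller now falls back to the latest mesh image whenever the CUSTOM_IMAGE environment variable is unset or empty. A minimal standalone sketch of that selection logic follows (the helper name and parameters here are illustrative, not the worker's actual API):

import os

_CUSTOM_IMAGE = "CUSTOM_IMAGE"

def resolve_mesh_image(find_latest_mesh_image, host_version, py_version):
    # An empty CUSTOM_IMAGE value is falsy, so `or` falls through to the
    # latest published mesh image instead of spawning a container with "".
    return (os.environ.get(_CUSTOM_IMAGE)
            or find_latest_mesh_image(host_version, py_version))
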
@@ -5,10 +5,14 @@ name: CI Consumption E2E tests

on:
workflow_dispatch:
inputs:
custom_image:
description: "Use a custom image to run consumption tests"
required: false
push:
branches: [ dev, master, main, release/* ]
branches: [ dev, main, release/* ]
pull_request:
branches: [ dev, master, main, release/* ]
branches: [ dev, main, release/* ]

jobs:
build:

@@ -35,24 +39,29 @@ jobs:
if: matrix.python-version == 3.7
env:
AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }}
CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
- name: Running 3.8 Tests
if: matrix.python-version == 3.8
env:
AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }}
CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
- name: Running 3.9 Tests
if: matrix.python-version == 3.9
env:
AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }}
CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
- name: Running 3.10 Tests
if: matrix.python-version == 3.10
env:
AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }}
CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests
- name: Running 3.11 Tests
if: matrix.python-version == 3.11
env:
AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }}
CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }}
run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests

@@ -5,9 +5,8 @@ name: CI Docker Consumption tests
on:
workflow_dispatch:
schedule:
# Monday to Thursday 2 AM PDT build
# * is a special character in YAML so you have to quote this string
- cron: "0 9 * * *"
# Run everyday at 5 AM CST
- cron: "0 10 * * *"

jobs:
build:

@@ -30,7 +29,6 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install -U -e .[dev]
python setup.py build
- name: Running 3.7 Tests
if: matrix.python-version == 3.7

@@ -49,7 +49,7 @@ jobs:
python-version: ${{ env.python_version }}
- name: Install dependencies
run: |
python -m pip install -U -e .[dev]
python setup.py build
- name: Running 3.7 Tests
if: env.python_version == 3.7
env:

@@ -5,9 +5,8 @@ name: CI Docker Dedicated tests
on:
workflow_dispatch:
schedule:
# Monday to Thursday 1 AM PDT build
# * is a special character in YAML so you have to quote this string
- cron: "0 8 * * *"
# Run everyday at 4 AM CST
- cron: "0 9 * * *"

jobs:
build:

@@ -30,7 +29,6 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install -U -e .[dev]
python setup.py build
- name: Running 3.7 Tests
if: matrix.python-version == 3.7

@@ -11,11 +11,11 @@ on:
required: false
default: "false"
push:
branches: [dev, master, main, release/*]
branches: [dev, main, release/*]
pull_request:
branches: [dev, master, main, release/*]
branches: [dev, main, release/*]
schedule:
# Monday to Thursday 1 AM PDT build
# Monday to Thursday 3 AM CST build
# * is a special character in YAML so you have to quote this string
- cron: "0 8 * * 1,2,3,4"

@@ -35,10 +35,6 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Set up Dotnet 6.x
uses: actions/setup-dotnet@v4
with:
dotnet-version: "6.x"
- name: Set up Dotnet 8.0.x
uses: actions/setup-dotnet@v4
with:

@@ -11,12 +11,12 @@ on:
required: false
default: "false"
schedule:
# Monday to Thursday 1 AM PDT build
# Monday to Thursday 3 AM CST build
# * is a special character in YAML so you have to quote this string
- cron: "0 8 * * 1,2,3,4"
push:
pull_request:
branches: [ dev, master, main, release/* ]
branches: [ dev, main, release/* ]

jobs:
build:

@@ -63,6 +63,7 @@ jobs:
# Retry a couple times to avoid certificate issue
retry 5 python setup.py build
retry 5 python setup.py webhost --branch-name=dev
retry 5 python setup.py extension
mkdir logs
- name: Test with pytest
env:

@@ -15,7 +15,7 @@ What are the supported Python versions?
|----------------------------------|------------|------------|------------|------------|-------------|-------------|
| Azure Functions 2.0 (deprecated) | ✔ | ✔ | - | - | - | - |
| Azure Functions 3.0 (deprecated) | ✔ | ✔ | ✔ | ✔ | - | - |
| Azure Functions 4.0 | - | ✔ | ✔ | ✔ | ✔ | coming soon |
| Azure Functions 4.0 | - | - | ✔ | ✔ | ✔ | ✔ |

For information about Azure Functions Runtime, please refer to [Azure Functions runtime versions overview](https://docs.microsoft.com/en-us/azure/azure-functions/functions-versions) page.

@@ -18,10 +18,10 @@ app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
connection="AzureWebJobsEventHubConnectionString",
data_type="string",
cardinality="many")
@app.table_output(arg_name="$return",
connection="AzureWebJobsStorage",
table_name="EventHubBatchTest")
def eventhub_multiple(events):
@app.blob_output(arg_name="$return",
path="python-worker-tests/test-eventhub-batch-triggered.txt",
connection="AzureWebJobsStorage")
def eventhub_multiple(events) -> str:
table_entries = []
for event in events:
json_entry = event.get_body()

@@ -46,13 +46,12 @@ def eventhub_output_batch(req: func.HttpRequest, out: func.Out[str]) -> str:

# Retrieve the event data from storage blob and return it as Http response
@app.function_name(name="get_eventhub_batch_triggered")
@app.route(route="get_eventhub_batch_triggered/{id}")
@app.table_input(arg_name="testEntities",
connection="AzureWebJobsStorage",
table_name="EventHubBatchTest",
partition_key="{id}")
def get_eventhub_batch_triggered(req: func.HttpRequest, testEntities):
return func.HttpResponse(status_code=200, body=testEntities)
@app.route(route="get_eventhub_batch_triggered")
@app.blob_input(arg_name="testEntities",
path="python-worker-tests/test-eventhub-batch-triggered.txt",
connection="AzureWebJobsStorage")
def get_eventhub_batch_triggered(req: func.HttpRequest, testEntities: func.InputStream):
return func.HttpResponse(status_code=200, body=testEntities.read().decode('utf-8'))


# Retrieve the event data from storage blob and return it as Http response

@@ -5,7 +5,7 @@ import json

# This is an actual EventHub trigger which handles Eventhub events in batches.
# It serializes multiple event data into a json and store it into a blob.
def main(events):
def main(events) -> str:
table_entries = []
for event in events:
json_entry = event.get_body()

@@ -13,9 +13,9 @@
},
{
"direction": "out",
"type": "table",
"type": "blob",
"name": "$return",
"tableName": "EventHubBatchTest",
"path": "python-worker-tests/test-eventhub-batch-triggered.txt",
"connection": "AzureWebJobsStorage"
}
]

@@ -4,5 +4,5 @@ import azure.functions as func


# Retrieve the event data from storage blob and return it as Http response
def main(req: func.HttpRequest, testEntities):
return func.HttpResponse(status_code=200, body=testEntities)
def main(req: func.HttpRequest, testEntities: func.InputStream):
return func.HttpResponse(status_code=200, body=testEntities.read().decode('utf-8'))

@@ -12,10 +12,9 @@
},
{
"direction": "in",
"type": "table",
"type": "blob",
"name": "testEntities",
"partitionKey": "WillBePopulated",
"tableName": "EventHubBatchTest",
"path": "python-worker-tests/test-eventhub-batch-triggered.txt",
"connection": "AzureWebJobsStorage"
},
{

@@ -15,8 +15,7 @@ app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
row_key='{id}',
partition_key="test")
def table_in_binding(req: func.HttpRequest, testEntity):
headers_dict = json.loads(testEntity)
return func.HttpResponse(status_code=200, headers=headers_dict)
return func.HttpResponse(status_code=200, body=testEntity)


@app.function_name(name="table_out_binding")

@@ -28,6 +27,5 @@ def table_out_binding(req: func.HttpRequest, resp: func.Out[func.HttpResponse]):
row_key_uuid = str(uuid.uuid4())
table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid}
table_json = json.dumps(table_dict)
http_resp = func.HttpResponse(status_code=200, headers=table_dict)
resp.set(http_resp)
resp.set(table_json)
return table_json

@@ -19,8 +19,7 @@ app = func.FunctionApp(http_auth_level=func.AuthLevel.ANONYMOUS)
row_key="{id}",
partition_key="test")
def table_in_binding(req: func.HttpRequest, testEntity):
headers_dict = json.loads(testEntity)
return func.HttpResponse(status_code=200, headers=headers_dict)
return func.HttpResponse(status_code=200, body=testEntity)


@app.function_name(name="table_out_binding")

@@ -36,6 +35,5 @@ def table_out_binding(req: func.HttpRequest, resp: func.Out[func.HttpResponse]):
row_key_uuid = str(uuid.uuid4())
table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid}
table_json = json.dumps(table_dict)
http_resp = func.HttpResponse(status_code=200, headers=table_dict)
resp.set(http_resp)
resp.set(table_json)
return table_json

@@ -1,10 +1,7 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import json

import azure.functions as func


def main(req: func.HttpRequest, testEntity):
headers_dict = json.loads(testEntity)
return func.HttpResponse(status_code=200, headers=headers_dict)
return func.HttpResponse(status_code=200, body=testEntity)

@@ -10,6 +10,5 @@ def main(req: func.HttpRequest, resp: func.Out[func.HttpResponse]):
row_key_uuid = str(uuid.uuid4())
table_dict = {'PartitionKey': 'test', 'RowKey': row_key_uuid}
table_json = json.dumps(table_dict)
http_resp = func.HttpResponse(status_code=200, headers=table_dict)
resp.set(http_resp)
resp.set(table_json)
return table_json

@@ -2,11 +2,10 @@
# Licensed under the MIT License.
import json
import time
import pathlib
from datetime import datetime
from unittest import skipIf

from dateutil import parser, tz
from dateutil import parser

from azure_functions_worker.utils.common import is_envvar_true
from tests.utils import testutils

@@ -35,16 +34,12 @@ class TestEventHubFunctions(testutils.WebHostTestCase):
def get_libraries_to_install(cls):
return ['azure-eventhub']

@testutils.retryable_test(3, 5)
def test_eventhub_multiple(self):
NUM_EVENTS = 3
all_row_keys_seen = dict([(str(i), True) for i in range(NUM_EVENTS)])
all_row_keys_seen = dict([(i, True) for i in range(NUM_EVENTS)])
partition_key = str(round(time.time()))

# Dynamically rewrite function.json to point to new partition key
# for recording EventHub state
old_partition_key = self._get_table_partition_key()
self._set_table_partition_key(partition_key)

# wait for host to restart after change
time.sleep(5)

@@ -57,34 +52,32 @@ class TestEventHubFunctions(testutils.WebHostTestCase):
data=json.dumps(docs))
self.assertEqual(r.status_code, 200)

row_keys = [str(i) for i in range(NUM_EVENTS)]
row_keys = [i for i in range(NUM_EVENTS)]
seen = [False] * NUM_EVENTS
row_keys_seen = dict(zip(row_keys, seen))

# Allow trigger to fire.
time.sleep(5)

try:
r = self.webhost.request('GET', 'get_eventhub_batch_triggered')
r = self.webhost.request('GET', 'get_eventhub_batch_triggered')

# Waiting for the blob get updated with the latest data from the
# eventhub output binding
time.sleep(2)
self.assertEqual(r.status_code, 200)
entries = r.json()
for entry in entries:
self.assertEqual(entry['PartitionKey'], partition_key)
row_key = entry['RowKey']
row_keys_seen[row_key] = True
# Waiting for the blob get updated with the latest data from the
# eventhub output binding
time.sleep(2)
self.assertEqual(r.status_code, 200)
entries = r.json()
for entry in entries:
self.assertEqual(entry['PartitionKey'], partition_key)
row_key = entry['RowKey']
row_keys_seen[row_key] = True

self.assertDictEqual(all_row_keys_seen, row_keys_seen)
finally:
self._cleanup(old_partition_key)
self.assertDictEqual(all_row_keys_seen, row_keys_seen)

@testutils.retryable_test(3, 5)
def test_eventhub_multiple_with_metadata(self):
# Generate a unique event body for EventHub event
# Record the start_time and end_time for checking event enqueue time
start_time = datetime.now(tz=tz.UTC)
start_time = datetime.utcnow()
count = 10
random_number = str(round(time.time()) % 1000)
req_body = {

@@ -98,7 +91,7 @@ class TestEventHubFunctions(testutils.WebHostTestCase):
data=json.dumps(req_body))
self.assertEqual(r.status_code, 200)
self.assertIn('OK', r.text)
end_time = datetime.now(tz=tz.UTC)
end_time = datetime.utcnow()

# Once the event get generated, allow function host to pool from
# EventHub and wait for metadata_multiple to execute,

@@ -119,8 +112,7 @@ class TestEventHubFunctions(testutils.WebHostTestCase):
event = events[event_index]

# Check if the event is enqueued between start_time and end_time
enqueued_time = parser.isoparse(event['enqueued_time']).astimezone(
tz=tz.UTC)
enqueued_time = parser.isoparse(event['enqueued_time'])
self.assertTrue(start_time < enqueued_time < end_time)

# Check if event properties are properly set

@@ -142,36 +134,6 @@ class TestEventHubFunctions(testutils.WebHostTestCase):
self.assertGreaterEqual(sys_props['SequenceNumber'], 0)
self.assertIsNotNone(sys_props['Offset'])

def _cleanup(self, old_partition_key):
self._set_table_partition_key(old_partition_key)

def _get_table_partition_key(self):
func_dict = self._get_table_function_json_dict()
partition_key = func_dict['bindings'][1]['partitionKey']
return partition_key

def _set_table_partition_key(self, partition_key):
full_json_path = self._get_table_function_json_path()

func_dict = self._get_table_function_json_dict()
func_dict['bindings'][1]['partitionKey'] = partition_key

with open(full_json_path, 'w') as f:
json.dump(func_dict, f, indent=2)

def _get_table_function_json_dict(self):
full_json_path = self._get_table_function_json_path()

with open(full_json_path, 'r') as f:
func_dict = json.load(f)

return func_dict

def _get_table_function_json_path(self):
script_dir = pathlib.Path(self.get_script_dir())
json_path = pathlib.Path('get_eventhub_batch_triggered/function.json')
return testutils.TESTS_ROOT / script_dir / json_path


@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)
or is_envvar_true(CONSUMPTION_DOCKER_TEST),

@@ -189,9 +151,10 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):
def get_libraries_to_install(cls):
return ['azure-eventhub']

@testutils.retryable_test(3, 5)
def test_eventhub_multiple(self):
NUM_EVENTS = 3
all_row_keys_seen = dict([(str(i), True) for i in range(NUM_EVENTS)])
all_row_keys_seen = dict([(i, True) for i in range(NUM_EVENTS)])
partition_key = str(round(time.time()))

docs = []

@@ -203,7 +166,7 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):
data=json.dumps(docs))
self.assertEqual(r.status_code, 200)

row_keys = [str(i) for i in range(NUM_EVENTS)]
row_keys = [i for i in range(NUM_EVENTS)]
seen = [False] * NUM_EVENTS
row_keys_seen = dict(zip(row_keys, seen))

@@ -212,7 +175,7 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):

r = self.webhost.request(
'GET',
f'get_eventhub_batch_triggered/{partition_key}')
'get_eventhub_batch_triggered')
self.assertEqual(r.status_code, 200)
entries = r.json()
for entry in entries:

@@ -222,10 +185,11 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):

self.assertDictEqual(all_row_keys_seen, row_keys_seen)

@testutils.retryable_test(3, 5)
def test_eventhub_multiple_with_metadata(self):
# Generate a unique event body for EventHub event
# Record the start_time and end_time for checking event enqueue time
start_time = datetime.now(tz=tz.UTC)
start_time = datetime.utcnow()
count = 10
random_number = str(round(time.time()) % 1000)
req_body = {

@@ -239,7 +203,7 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):
data=json.dumps(req_body))
self.assertEqual(r.status_code, 200)
self.assertIn('OK', r.text)
end_time = datetime.now(tz=tz.UTC)
end_time = datetime.utcnow()

# Once the event get generated, allow function host to pool from
# EventHub and wait for metadata_multiple to execute,

@@ -260,8 +224,7 @@ class TestEventHubBatchFunctionsStein(testutils.WebHostTestCase):
event = events[event_index]

# Check if the event is enqueued between start_time and end_time
enqueued_time = parser.isoparse(event['enqueued_time']).astimezone(
tz=tz.UTC)
enqueued_time = parser.isoparse(event['enqueued_time'])
self.assertTrue(start_time < enqueued_time < end_time)

# Check if event properties are properly set

@@ -2,10 +2,16 @@
# Licensed under the MIT License.
import json
import time
from unittest import skipIf

from azure_functions_worker.utils.common import is_envvar_true
from tests.utils import testutils
from tests.utils.constants import DEDICATED_DOCKER_TEST, CONSUMPTION_DOCKER_TEST


@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)
or is_envvar_true(CONSUMPTION_DOCKER_TEST),
"Skipping SB tests till docker image is updated with host 4.33")
class TestServiceBusFunctions(testutils.WebHostTestCase):

@classmethod

@@ -24,7 +24,7 @@ class TestTableFunctions(testutils.WebHostTestCase):
def test_table_bindings(self):
out_resp = self.webhost.request('POST', 'table_out_binding')
self.assertEqual(out_resp.status_code, 200)
row_key = out_resp.headers['rowKey']
row_key = json.loads(out_resp.text)['RowKey']

script_dir = pathlib.Path(self.get_script_dir())
json_path = pathlib.Path('table_in_binding/function.json')

@@ -42,8 +42,12 @@

in_resp = self.webhost.request('GET', 'table_in_binding')
self.assertEqual(in_resp.status_code, 200)
in_row_key = in_resp.headers['rowKey']
self.assertEqual(in_row_key, row_key)
row_key_present = False
for row in json.loads(in_resp.text):
if row["RowKey"] == row_key:
row_key_present = True
break
self.assertTrue(row_key_present)


@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)

@@ -60,12 +64,16 @@
def test_table_bindings(self):
out_resp = self.webhost.request('POST', 'table_out_binding')
self.assertEqual(out_resp.status_code, 200)
row_key = out_resp.headers['rowKey']
row_key = json.loads(out_resp.text)['RowKey']

in_resp = self.webhost.request('GET', f'table_in_binding/{row_key}')
self.assertEqual(in_resp.status_code, 200)
in_row_key = in_resp.headers['rowKey']
self.assertEqual(in_row_key, row_key)
row_key_present = False
for row in json.loads(in_resp.text):
if row["RowKey"] == row_key:
row_key_present = True
break
self.assertTrue(row_key_present)


@skipIf(is_envvar_true(DEDICATED_DOCKER_TEST)

@@ -1,15 +0,0 @@
{
"scriptFile": "main.py",
"bindings": [
{
"type": "httpTrigger",
"direction": "in",
"name": "req"
},
{
"type": "http",
"direction": "out",
"name": "$return"
}
]
}

@@ -1,17 +0,0 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import logging

import azure.functions

logger = logging.getLogger('my function')
logger.setLevel(logging.DEBUG)


def main(req: azure.functions.HttpRequest):
logging.critical('logging critical', exc_info=True)
logger.info('logging info', exc_info=True)
logger.warning('logging warning', exc_info=True)
logger.debug('logging debug', exc_info=True)
logger.error('logging error', exc_info=True)
return 'OK-user-debug'

@@ -109,11 +109,6 @@ class TestHttpFunctions(testutils.WebHostTestCase):
self.assertIn('logging error', host_out)
self.assertNotIn('logging debug', host_out)

def test_debug_with_user_logging(self):
r = self.webhost.request('GET', 'debug_user_logging')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.text, 'OK-user-debug')

def check_log_debug_with_user_logging(self, host_out: typing.List[str]):
self.assertIn('logging info', host_out)
self.assertIn('logging warning', host_out)

@@ -7,34 +7,40 @@ EXTENSIONS_CSPROJ_TEMPLATE = """\
<?xml version="1.0" encoding="UTF-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<WarningsAsErrors></WarningsAsErrors>
<DefaultItemExcludes>**</DefaultItemExcludes>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Azure.Messaging.EventHubs"
Version="5.6.2" />
Version="5.11.1" />
<PackageReference Include="Azure.Messaging.EventGrid"
Version="4.21.0" />
Version="4.23.0" />
<PackageReference Include="Microsoft.NET.Sdk.Functions"
Version="4.0.1" />
Version="4.3.0" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.CosmosDB"
Version="4.2.0" />
Version="4.5.0" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.EventHubs"
Version="5.0.0" />
Version="6.2.0" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.EventGrid"
Version="3.3.1" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.Storage"
Version="4.0.5" />
Version="5.2.2" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.ServiceBus"
Version="4.2.1" />
Version="5.14.0" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.Sql"
Version="3.0.534" />
<PackageReference
Include="Microsoft.Azure.WebJobs.Script.ExtensionsMetadataGenerator"
Version="1.1.3" />
Version="4.0.1" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.DurableTask"
Version="2.9.4" />
Version="2.13.2" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.Tables"
Version="1.2.1" />
<PackageReference Include="System.Drawing.Common"
Version="4.7.3" />
<PackageReference Include="System.Text.Json"
Version="6.0.9" />
</ItemGroup>
</Project>
"""

@@ -31,6 +31,7 @@ _DUMMY_CONT_KEY = "MDEyMzQ1Njc4OUFCQ0RFRjAxMjM0NTY3ODlBQkNERUY="
_FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \
"/archive/refs/heads/dev.zip"
_FUNC_FILE_NAME = "azure-functions-python-library-dev"
_CUSTOM_IMAGE = "CUSTOM_IMAGE"


class LinuxConsumptionWebHostController:

@@ -277,8 +278,9 @@ class LinuxConsumptionWebHostController:
return f'{iv_base64}.{encrypted_base64}.{key_sha256_base64}'

def __enter__(self):
mesh_image = self._find_latest_mesh_image(self._host_version,
self._py_version)
mesh_image = (os.environ.get(_CUSTOM_IMAGE)
or self._find_latest_mesh_image(self._host_version,
self._py_version))
self.spawn_container(image=mesh_image)
return self