From 89a54f07133fcca87e8a48505e40e0e765e38b8a Mon Sep 17 00:00:00 2001 From: hallvictoria <59299039+hallvictoria@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:03:57 -0500 Subject: [PATCH] fix: 1ES pipeline fixes (#1512) * security fixes * security fixes * formatting\ * formatting * added variable to pipeline * lint * directory typo * pin numpy * pin for 3.7 * pin for lc * remove numpy pinning * numpy test fixes * ADO changes * public build formatting * removed internal pool from unit tests * unit tests use linux pool * syncing with ADO changes * logging test and async httpv2 changes * core tools, unit test fixes * docker test fixes * skipping unit tests * installing base extension to container * install pytest * typo * additional pytest packages * added requests * initialized variable * install .[dev] * PEII docker variable * sync with ADO changes * docker test fixes * install docker dependencies * install docker dependencies * teardownclass * typo in wpc tests * separate folder for httpv2 tests * cache fix * name typo * adding PIWD for httpv2 * sync with ADO branch * sync with ADO branch * scheduled integration tests * lc env var fix * feedback * missed change --------- Co-authored-by: Victoria Hall --- .ci/e2e_integration_test/pipeline.yml | 50 --- .ci/linux_devops_build.sh | 17 -- .ci/linux_devops_e2e_tests.sh | 12 - .ci/linux_devops_tools.sh | 6 - .ci/linux_devops_unit_tests.sh | 4 - .../dockerfiles/perf_tests.Dockerfile | 26 -- .../dockerfiles/perf_tests_3.9.Dockerfile | 211 ------------- .../k6scripts/AsyncHttpTriggerCPUIntensive.js | 47 --- .../AsyncHttpTriggerCPUIntensiveWithSleeps.js | 47 --- .../k6scripts/AsyncHttpTriggerHelloWorld.js | 48 --- .../AsyncHttpTriggerWithAsyncRequest.js | 47 --- .../AsyncHttpTriggerWithSyncRequests.js | 47 --- .../SyncGetBlobAsBytesReturnHttpResponse.js | 65 ---- .ci/perf_tests/k6scripts/SyncHelloWorld.js | 48 --- .../k6scripts/SyncHttpTriggerCPUIntensive.js | 47 --- .../SyncHttpTriggerCPUIntensiveWithSleeps.js | 47 --- 
.../k6scripts/SyncHttpTriggerHtmlParser.js | 50 --- .../SyncHttpTriggerWithMixWorkloads.js | 47 --- .../SyncHttpTriggerWithSyncRequests.js | 49 --- .../SyncPutBlobAsBytesReturnHttpResponse.js | 53 ---- .ci/perf_tests/run-perftests.sh | 18 -- .github/workflows/ci_consumption_workflow.yml | 71 ----- .github/workflows/ci_docker_con_workflow.yml | 92 ------ .../workflows/ci_docker_custom_workflow.yml | 112 ------- .github/workflows/ci_docker_ded_workflow.yml | 93 ------ .github/workflows/ci_e2e_workflow.yml | 161 ---------- .github/workflows/ci_ut_workflow.yml | 91 ------ .github/workflows/codeql-analysis.yml | 70 ----- .github/workflows/perf-testing-setup.yml | 137 --------- azure-pipelines.yml | 217 ------------- eng/ci/core-tools-tests.yml | 27 ++ eng/ci/custom-image-tests.yml | 27 ++ eng/ci/docker-consumption-tests.yml | 38 +++ eng/ci/docker-dedicated-tests.yml | 38 +++ eng/ci/integration-tests.yml | 49 +++ eng/ci/official-build.yml | 22 +- eng/ci/public-build.yml | 40 ++- eng/templates/jobs/build.yml | 4 +- eng/templates/jobs/ci-unit-tests.yml | 42 +++ .../official/jobs/build-artifacts.yml | 286 ++++++++++++++---- .../official/jobs/ci-core-tools-tests.yml | 35 +++ .../official/jobs/ci-custom-image-tests.yml | 37 +++ .../jobs/ci-docker-consumption-tests.yml | 72 +++++ .../jobs/ci-docker-dedicated-tests.yml | 72 +++++ eng/templates/official/jobs/ci-e2e-tests.yml | 177 ++++------- eng/templates/official/jobs/ci-lc-tests.yml | 48 +++ pack/templates/macos_64_env_gen.yml | 4 - pack/templates/nix_env_gen.yml | 4 - pack/templates/win_env_gen.yml | 4 - .../function_app.py | 2 +- .../numpy_func/__init__.py | 2 +- tests/endtoend/test_http_functions.py | 166 +--------- .../test_worker_process_count_functions.py | 5 +- .../function_app.py | 6 +- .../test_deferred_bindings_blob_functions.py | 4 + .../http_functions_v2/fastapi/function_app.py | 0 .../http_v2_tests/test_http_v2.py | 189 ++++++++++++ tests/unittests/test_http_functions_v2.py | 2 + tests/unittests/test_loader.py | 
2 + tests/unittests/test_logging.py | 4 +- tests/utils/testutils_docker.py | 32 +- tests/utils/testutils_lc.py | 8 +- 62 files changed, 1039 insertions(+), 2439 deletions(-) delete mode 100644 .ci/e2e_integration_test/pipeline.yml delete mode 100644 .ci/linux_devops_build.sh delete mode 100644 .ci/linux_devops_e2e_tests.sh delete mode 100644 .ci/linux_devops_tools.sh delete mode 100644 .ci/linux_devops_unit_tests.sh delete mode 100644 .ci/perf_tests/dockerfiles/perf_tests.Dockerfile delete mode 100644 .ci/perf_tests/dockerfiles/perf_tests_3.9.Dockerfile delete mode 100644 .ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensive.js delete mode 100644 .ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensiveWithSleeps.js delete mode 100644 .ci/perf_tests/k6scripts/AsyncHttpTriggerHelloWorld.js delete mode 100644 .ci/perf_tests/k6scripts/AsyncHttpTriggerWithAsyncRequest.js delete mode 100644 .ci/perf_tests/k6scripts/AsyncHttpTriggerWithSyncRequests.js delete mode 100644 .ci/perf_tests/k6scripts/SyncGetBlobAsBytesReturnHttpResponse.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHelloWorld.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensive.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensiveWithSleeps.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHttpTriggerHtmlParser.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHttpTriggerWithMixWorkloads.js delete mode 100644 .ci/perf_tests/k6scripts/SyncHttpTriggerWithSyncRequests.js delete mode 100644 .ci/perf_tests/k6scripts/SyncPutBlobAsBytesReturnHttpResponse.js delete mode 100644 .ci/perf_tests/run-perftests.sh delete mode 100644 .github/workflows/ci_consumption_workflow.yml delete mode 100644 .github/workflows/ci_docker_con_workflow.yml delete mode 100644 .github/workflows/ci_docker_custom_workflow.yml delete mode 100644 .github/workflows/ci_docker_ded_workflow.yml delete mode 100644 .github/workflows/ci_e2e_workflow.yml delete mode 100644 
.github/workflows/ci_ut_workflow.yml delete mode 100644 .github/workflows/codeql-analysis.yml delete mode 100644 .github/workflows/perf-testing-setup.yml delete mode 100644 azure-pipelines.yml create mode 100644 eng/ci/core-tools-tests.yml create mode 100644 eng/ci/custom-image-tests.yml create mode 100644 eng/ci/docker-consumption-tests.yml create mode 100644 eng/ci/docker-dedicated-tests.yml create mode 100644 eng/ci/integration-tests.yml create mode 100644 eng/templates/jobs/ci-unit-tests.yml create mode 100644 eng/templates/official/jobs/ci-core-tools-tests.yml create mode 100644 eng/templates/official/jobs/ci-custom-image-tests.yml create mode 100644 eng/templates/official/jobs/ci-docker-consumption-tests.yml create mode 100644 eng/templates/official/jobs/ci-docker-dedicated-tests.yml create mode 100644 eng/templates/official/jobs/ci-lc-tests.yml rename tests/{endtoend/http_functions => extension_tests/http_v2_tests}/http_functions_v2/fastapi/function_app.py (100%) create mode 100644 tests/extension_tests/http_v2_tests/test_http_v2.py diff --git a/.ci/e2e_integration_test/pipeline.yml b/.ci/e2e_integration_test/pipeline.yml deleted file mode 100644 index e8a34909..00000000 --- a/.ci/e2e_integration_test/pipeline.yml +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. 
- -# E2E Integration Test Pipeline - -pr: none -trigger: none - -pool: - vmImage: 'ubuntu-latest' - -variables: - DOTNET_VERSION: '3.1.x' - DOTNET_VERSION_6: '6.x' - PYTHON_VERSION: '3.9' - -steps: -- task: UsePythonVersion@0 - displayName: 'Install Python' - inputs: - versionSpec: $(PYTHON_VERSION) - addToPath: true -- task: UseDotNet@2 - displayName: 'Install DotNet 3' - inputs: - packageType: 'sdk' - version: $(DOTNET_VERSION) -- task: UseDotNet@2 - displayName: 'Install DotNet 6' - inputs: - packageType: 'sdk' - version: $(DOTNET_VERSION_6) -- pwsh: '$(Build.SourcesDirectory)/.ci/e2e_integration_test/start-e2e.ps1' - env: - AzureWebJobsStorage: $(AzureWebJobsStorage) - AzureWebJobsCosmosDBConnectionString: $(AzureWebJobsCosmosDBConnectionString) - AzureWebJobsEventHubConnectionString: $(AzureWebJobsEventHubConnectionString) - AzureWebJobsServiceBusConnectionString: $(AzureWebJobsServiceBusConnectionString) - AzureWebJobsSqlConnectionString: $(AzureWebJobsSqlConnectionString) - AzureWebJobsEventGridTopicUri: $(AzureWebJobsEventGridTopicUri) - AzureWebJobsEventGridConnectionKey: $(AzureWebJobsEventGridConnectionKey) - PythonVersion: $(PYTHON_VERSION) - displayName: 'Running Python Language Worker E2E Tests' -- task: PublishTestResults@2 - displayName: 'Publish Python Worker E2E Test Result' - condition: succeededOrFailed() - inputs: - testResultsFiles: 'e2e-integration-test-report.xml' - testRunTitle: 'Publish Python Worker E2E Test Result' - failTaskOnFailedTests: true diff --git a/.ci/linux_devops_build.sh b/.ci/linux_devops_build.sh deleted file mode 100644 index fdae3f57..00000000 --- a/.ci/linux_devops_build.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/bash - -set -e -x - -python -m pip install --upgrade pip - -# Install the latest Azure Functions Python Worker from test.pypi.org -python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U -e .[dev] - -# Install the latest Azure Functions Python Library from 
test.pypi.org -python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre - -# Download Azure Functions Host -python setup.py webhost - -# Setup WebJobs Extensions -python setup.py extension \ No newline at end of file diff --git a/.ci/linux_devops_e2e_tests.sh b/.ci/linux_devops_e2e_tests.sh deleted file mode 100644 index 7a5d5105..00000000 --- a/.ci/linux_devops_e2e_tests.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -set -e -x -export AzureWebJobsStorage=$LINUXSTORAGECONNECTIONSTRING -export AzureWebJobsCosmosDBConnectionString=$LINUXCOSMOSDBCONNECTIONSTRING -export AzureWebJobsEventHubConnectionString=$LINUXEVENTHUBCONNECTIONSTRING -export AzureWebJobsServiceBusConnectionString=$LINUXSERVICEBUSCONNECTIONSTRING -export AzureWebJobsSqlConnectionString=$LINUXSQLCONNECTIONSTRING -export AzureWebJobsEventGridTopicUri=$LINUXEVENTGRIDTOPICURI -export AzureWebJobsEventGridConnectionKey=$LINUXEVENTGRIDTOPICCONNECTIONKEY - -pytest --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend diff --git a/.ci/linux_devops_tools.sh b/.ci/linux_devops_tools.sh deleted file mode 100644 index c51befc2..00000000 --- a/.ci/linux_devops_tools.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash - -wget -q https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb \ -&& sudo dpkg -i packages-microsoft-prod.deb \ -&& sudo apt-get update \ -&& sudo apt-get install -y azure-functions-core-tools diff --git a/.ci/linux_devops_unit_tests.sh b/.ci/linux_devops_unit_tests.sh deleted file mode 100644 index 82078ea1..00000000 --- a/.ci/linux_devops_unit_tests.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -set -e -x -pytest --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests \ No newline at end of file diff --git a/.ci/perf_tests/dockerfiles/perf_tests.Dockerfile b/.ci/perf_tests/dockerfiles/perf_tests.Dockerfile deleted file 
mode 100644 index 6adf9f1d..00000000 --- a/.ci/perf_tests/dockerfiles/perf_tests.Dockerfile +++ /dev/null @@ -1,26 +0,0 @@ -ARG PYTHON_VERSION=3.11 - -FROM mcr.microsoft.com/azure-functions/python:4-python$PYTHON_VERSION - -# Mounting local machines azure-functions-python-worker and azure-functions-python-library onto it -RUN rm -rf /azure-functions-host/workers/python/${PYTHON_VERSION}/LINUX/X64/azure_functions_worker - -# Use the following command to run the docker image with customizible worker and library -VOLUME ["/azure-functions-host/workers/python/${PYTHON_VERSION}/LINUX/X64/azure_functions_worker"] - -ENV AzureWebJobsScriptRoot=/home/site/wwwroot \ - AzureFunctionsJobHost__Logging__Console__IsEnabled=true \ - FUNCTIONS_WORKER_PROCESS_COUNT=1 \ - AZURE_FUNCTIONS_ENVIRONMENT=Development \ - FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED=1 - -RUN apt-get --quiet update && \ - apt-get install --quiet -y git procps && \ - # Procps is required for displaying worker and profiling processes info - cd /home && \ - git clone https://github.com/vrdmr/AzFunctionsPythonPerformance.git && \ - mkdir -p /home/site/wwwroot/ && \ - cp -r AzFunctionsPythonPerformance/* /home/site/wwwroot/ && \ - pip install -q -r /home/site/wwwroot/requirements.txt - -CMD [ "/azure-functions-host/Microsoft.Azure.WebJobs.Script.WebHost" ] diff --git a/.ci/perf_tests/dockerfiles/perf_tests_3.9.Dockerfile b/.ci/perf_tests/dockerfiles/perf_tests_3.9.Dockerfile deleted file mode 100644 index 39596e15..00000000 --- a/.ci/perf_tests/dockerfiles/perf_tests_3.9.Dockerfile +++ /dev/null @@ -1,211 +0,0 @@ -# -# NOTE: THIS DOCKERFILE IS SPECIALLY MADE FOR PYTHON 3.9 WORKLOAD. -# - -# Build the runtime from source -ARG HOST_VERSION=3.0.14492 -FROM mcr.microsoft.com/dotnet/core/sdk:3.1 AS runtime-image -ARG HOST_VERSION - -ENV PublishWithAspNetCoreTargetManifest=false - -RUN BUILD_NUMBER=$(echo ${HOST_VERSION} | cut -d'.' 
-f 3) && \ - git clone --branch v${HOST_VERSION} https://github.com/Azure/azure-functions-host /src/azure-functions-host && \ - cd /src/azure-functions-host && \ - HOST_COMMIT=$(git rev-list -1 HEAD) && \ - dotnet publish -v q /p:BuildNumber=$BUILD_NUMBER /p:CommitHash=$HOST_COMMIT src/WebJobs.Script.WebHost/WebJobs.Script.WebHost.csproj -c Release --output /azure-functions-host --runtime linux-x64 && \ - mv /azure-functions-host/workers /workers && mkdir /azure-functions-host/workers && \ - rm -rf /root/.local /root/.nuget /src - -RUN EXTENSION_BUNDLE_VERSION=1.3.3 && \ - EXTENSION_BUNDLE_FILENAME=Microsoft.Azure.Functions.ExtensionBundle.1.3.3_linux-x64.zip && \ - apt-get update && \ - apt-get install -y gnupg wget unzip && \ - wget https://functionscdn.azureedge.net/public/ExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION/$EXTENSION_BUNDLE_FILENAME && \ - mkdir -p /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION && \ - unzip /$EXTENSION_BUNDLE_FILENAME -d /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION && \ - rm -f /$EXTENSION_BUNDLE_FILENAME && \ - EXTENSION_BUNDLE_VERSION_V2=2.0.1 && \ - EXTENSION_BUNDLE_FILENAME_V2=Microsoft.Azure.Functions.ExtensionBundle.2.0.1_linux-x64.zip && \ - wget https://functionscdn.azureedge.net/public/ExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2/$EXTENSION_BUNDLE_FILENAME_V2 && \ - mkdir -p /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2 && \ - unzip /$EXTENSION_BUNDLE_FILENAME_V2 -d /FuncExtensionBundles/Microsoft.Azure.Functions.ExtensionBundle/$EXTENSION_BUNDLE_VERSION_V2 && \ - rm -f /$EXTENSION_BUNDLE_FILENAME_V2 &&\ - find /FuncExtensionBundles/ -type f -exec chmod 644 {} \; - -FROM buildpack-deps:buster - -# ensure local python is preferred over distribution python -ENV PATH /usr/local/bin:$PATH - -# 
http://bugs.python.org/issue19846 -# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. -ENV LANG C.UTF-8 - -# extra dependencies (over what buildpack-deps already includes) -RUN apt-get update && apt-get install -y --no-install-recommends \ - libbluetooth-dev \ - tk-dev \ - uuid-dev \ - && rm -rf /var/lib/apt/lists/* - -ENV GPG_KEY E3FF2839C048B25C084DEBE9B26995E310250568 -ENV PYTHON_VERSION 3.9.0 - -RUN set -ex \ - \ - && wget -O python.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget -O python.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && export GNUPGHOME="$(mktemp -d)" \ - && gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$GPG_KEY" \ - && gpg --batch --verify python.tar.xz.asc python.tar.xz \ - && { command -v gpgconf > /dev/null && gpgconf --kill all || :; } \ - && rm -rf "$GNUPGHOME" python.tar.xz.asc \ - && mkdir -p /usr/src/python \ - && tar -xJC /usr/src/python --strip-components=1 -f python.tar.xz \ - && rm python.tar.xz \ - && cd /usr/src/python \ - && gnuArch="$(dpkg-architecture --query DEB_BUILD_GNU_TYPE)" \ - && ./configure \ - --build="$gnuArch" \ - --enable-loadable-sqlite-extensions \ - --enable-optimizations \ - --enable-option-checking=fatal \ - --enable-shared \ - --with-system-expat \ - --with-system-ffi \ - --without-ensurepip \ - && make -j "$(nproc)" \ - && make install \ - && rm -rf /usr/src/python \ - && find /usr/local -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name '*.a' \) \) \ - \) -exec rm -rf '{}' + \ - \ - && ldconfig \ - && python3 --version - -# make some useful symlinks that are expected to exist -RUN cd /usr/local/bin \ - && ln -s idle3 idle \ - && ln -s pydoc3 pydoc \ - && ln -s python3 python \ - && ln -s python3-config 
python-config - -# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value ''" -ENV PYTHON_PIP_VERSION 20.2.3 -# https://github.com/pypa/get-pip -ENV PYTHON_GET_PIP_URL https://github.com/pypa/get-pip/raw/fa7dc83944936bf09a0e4cb5d5ec852c0d256599/get-pip.py -ENV PYTHON_GET_PIP_SHA256 6e0bb0a2c2533361d7f297ed547237caf1b7507f197835974c0dd7eba998c53c - -RUN set -ex; \ - \ - wget -O get-pip.py "$PYTHON_GET_PIP_URL"; \ - echo "$PYTHON_GET_PIP_SHA256 *get-pip.py" | sha256sum --check --strict -; \ - \ - python get-pip.py \ - --disable-pip-version-check \ - --no-cache-dir \ - "pip==$PYTHON_PIP_VERSION" \ - ; \ - pip --version; \ - \ - find /usr/local -depth \ - \( \ - \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \ - -o \ - \( -type f -a \( -name '*.pyc' -o -name '*.pyo' \) \) \ - \) -exec rm -rf '{}' +; \ - rm -f get-pip.py - -ARG HOST_VERSION - -ENV LANG=C.UTF-8 \ - ACCEPT_EULA=Y \ - AzureWebJobsScriptRoot=/home/site/wwwroot \ - HOME=/home \ - FUNCTIONS_WORKER_RUNTIME=python \ - ASPNETCORE_URLS=http://+:80 \ - DOTNET_RUNNING_IN_CONTAINER=true \ - DOTNET_USE_POLLING_FILE_WATCHER=true \ - HOST_VERSION=${HOST_VERSION} - -# Install Python dependencies -RUN apt-get update && \ - apt-get install -y wget vim && \ - echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections && \ - apt-get update && \ - apt-get install -y apt-transport-https curl gnupg && \ - curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \ - curl https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list && \ - # Needed for libss1.0.0 and in turn MS SQL - echo 'deb http://security.debian.org/debian-security jessie/updates main' >> /etc/apt/sources.list && \ - # install necessary locales for MS SQL - apt-get update && apt-get install -y locales && \ - echo 'en_US.UTF-8 UTF-8' > /etc/locale.gen && \ - locale-gen && \ - # install MS SQL related packages - apt-get update && \ - 
apt-get install -y unixodbc msodbcsql17 mssql-tools && \ - # .NET Core dependencies - apt-get install -y --no-install-recommends ca-certificates \ - libc6 libgcc1 libgssapi-krb5-2 libicu63 libssl1.1 libstdc++6 zlib1g && \ - rm -rf /var/lib/apt/lists/* && \ - # Custom dependencies: - # OpenCV dependencies: - apt-get update && \ - apt-get install -y libglib2.0-0 libsm6 libxext6 libxrender-dev && \ - # binutils - apt-get install -y binutils && \ - # OpenMP dependencies - apt-get install -y libgomp1 && \ - # mysql dependencies - apt-get install -y default-libmysqlclient-dev - -COPY --from=runtime-image ["/azure-functions-host", "/azure-functions-host"] -COPY --from=runtime-image [ "/workers/python", "/azure-functions-host/workers/python" ] -COPY --from=runtime-image [ "/FuncExtensionBundles", "/FuncExtensionBundles" ] - -RUN pip install grpcio grpcio-tools - -RUN cp -r /azure-functions-host/workers/python/3.8 /azure-functions-host/workers/python/3.9 && \ - rm -r /azure-functions-host/workers/python/3.9/LINUX/X64/grpc && \ - rm -r /azure-functions-host/workers/python/3.9/LINUX/X64/grpcio-1.26.0.dist-info && \ - cp -r /usr/local/lib/python3.9/site-packages/grpc /azure-functions-host/workers/python/3.9/LINUX/X64/ && \ - cp -r /usr/local/lib/python3.9/site-packages/grpcio-1.32.0.dist-info /azure-functions-host/workers/python/3.9/LINUX/X64/ && \ - cp -r /usr/local/lib/python3.9/site-packages/grpc_tools /azure-functions-host/workers/python/3.9/LINUX/X64/ && \ - cp -r /usr/local/lib/python3.9/site-packages/grpcio_tools-1.32.0.dist-info /azure-functions-host/workers/python/3.9/LINUX/X64/ - -RUN cd /azure-functions-host/workers/python/ && \ - sed -i 's/3.8"]/3.8", "3.9"]/g' worker.config.json - -RUN cd /azure-functions-host/workers/python/3.9 && \ - sed -i 's/asyncio.Task.current_task/asyncio.current_task/g' OSX/X64/azure_functions_worker/dispatcher.py && \ - sed -i 's/asyncio.Task.current_task/asyncio.current_task/g' LINUX/X64/azure_functions_worker/dispatcher.py - -# Mounting 
local machines azure-functions-python-worker and azure-functions-python-library onto it -RUN rm -rf /azure-functions-host/workers/python/3.9/LINUX/X64/azure_functions_worker - -# Use the following command to run the docker image with customizible worker and library -VOLUME ["/azure-functions-host/workers/python/3.8/LINUX/X64/azure_functions_worker"] - -RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 379CE192D401AB61 && \ - echo "deb https://dl.bintray.com/loadimpact/deb stable main" | tee -a /etc/apt/sources.list && \ - apt-get update && \ - apt-get install -y git k6 procps && \ - # Procps is required for displaying worker and profiling processes info - cd /home && \ - git clone https://github.com/vrdmr/AzFunctionsPythonPerformance.git && \ - mkdir -p /home/site/wwwroot/ && \ - cp -r AzFunctionsPythonPerformance/* /home/site/wwwroot/ && \ - pip install -r /home/site/wwwroot/requirements.txt - -ENV FUNCTIONS_WORKER_RUNTIME_VERSION=3.9 \ - AzureWebJobsScriptRoot=/home/site/wwwroot \ - AzureFunctionsJobHost__Logging__Console__IsEnabled=true \ - FUNCTIONS_WORKER_PROCESS_COUNT=1 \ - AZURE_FUNCTIONS_ENVIRONMENT=Development - -CMD [ "/azure-functions-host/Microsoft.Azure.WebJobs.Script.WebHost" ] \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensive.js b/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensive.js deleted file mode 100644 index d6f93aab..00000000 --- a/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensive.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 25, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 25, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerCPUIntensive`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensiveWithSleeps.js b/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensiveWithSleeps.js deleted file mode 100644 index b5a9986e..00000000 --- a/.ci/perf_tests/k6scripts/AsyncHttpTriggerCPUIntensiveWithSleeps.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 25, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 25, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerCPUIntensiveWithSleeps`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/AsyncHttpTriggerHelloWorld.js b/.ci/perf_tests/k6scripts/AsyncHttpTriggerHelloWorld.js deleted file mode 100644 index c4b59d73..00000000 --- a/.ci/perf_tests/k6scripts/AsyncHttpTriggerHelloWorld.js +++ /dev/null @@ -1,48 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 100, duration: "30s" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 100, duration: "4m15s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerHelloWorld`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200, - "content is present": (r) => r.body.indexOf("This HTTP triggered function executed successfully") !== -1, - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithAsyncRequest.js b/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithAsyncRequest.js deleted file mode 100644 index 2b2460f7..00000000 --- a/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithAsyncRequest.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 100 VUs during first minute - { target: 100, duration: "30s" }, - // Hold at 100 VUs for the next 1 minutes and 15 seconds - { target: 100, duration: "1m15s" }, - // Linearly ramp down from 100 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~2 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures -// "check_failure_rate": [ -// // Global failure rate should be less than 1% -// "rate<0.01", -// // Abort the test early if it climbs over 5% -// { threshold: "rate<=0.05", abortOnFail: true }, -// ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerWithAsyncRequest`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithSyncRequests.js b/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithSyncRequests.js deleted file mode 100644 index 5acc66c8..00000000 --- a/.ci/perf_tests/k6scripts/AsyncHttpTriggerWithSyncRequests.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 100, duration: "30s" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 100, duration: "4m15s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/AsyncHttpTriggerWithSyncRequests`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/SyncGetBlobAsBytesReturnHttpResponse.js b/.ci/perf_tests/k6scripts/SyncGetBlobAsBytesReturnHttpResponse.js deleted file mode 100644 index 5fab61fb..00000000 --- a/.ci/perf_tests/k6scripts/SyncGetBlobAsBytesReturnHttpResponse.js +++ /dev/null @@ -1,65 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 
'http' : 'https'); -var INPUT_FILENAME = 'Input_256MB' -var CONTENT_SIZE = 1024 * 1024 * 256; // 256 MB - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 20 VUs during first minute - { target: 20, duration: "1m" }, - // Hold at 20 VUs for the next 3 minutes and 45 seconds - { target: 20, duration: "3m45s" }, - // Linearly ramp down from 20 to 0 VUs over the last 15 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 40s - "http_req_duration": ["p(95)<40000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Setup function -// This will create a blob which will later be used as an input binding -export function setup() { - let no_random_input = true; - let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncPutBlobAsBytesReturnHttpResponse?content_size=${CONTENT_SIZE}&no_random_input=${no_random_input}&outfile=${INPUT_FILENAME}`; - let response = http.get(url); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200, - "content_size matches": (r) => r.json().content_size === CONTENT_SIZE, - }); -} - -// Main function -export default function () { - let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncGetBlobAsBytesReturnHttpResponse?infile=${INPUT_FILENAME}`; - let response = http.get(url); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200, - "content_size matches": (r) => 
r.json().content_size === CONTENT_SIZE, - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} diff --git a/.ci/perf_tests/k6scripts/SyncHelloWorld.js b/.ci/perf_tests/k6scripts/SyncHelloWorld.js deleted file mode 100644 index acb28f56..00000000 --- a/.ci/perf_tests/k6scripts/SyncHelloWorld.js +++ /dev/null @@ -1,48 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 100 VUs during first minute - { target: 100, duration: "30s" }, - // Hold at 100 VUs for the next 1 minutes and 15 seconds - { target: 100, duration: "1m15s" }, - // Linearly ramp down from 100 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~2 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(50)<5000"] - // Thresholds based on the custom metric we defined and use to track application failures -// "check_failure_rate": [ -// // Global failure rate should be less than 1% -// "rate<0.01", -// // Abort the test early if it climbs over 5% -// { threshold: "rate<=0.05", abortOnFail: true }, -// ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHelloWorld`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200, - // "content is present": (r) => r.body.indexOf("This HTTP triggered function executed successfully") !== -1, - }); - - // We reverse the check() result 
since we want to count the failures - failureRate.add(!checkRes); -} diff --git a/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensive.js b/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensive.js deleted file mode 100644 index 3e9e154d..00000000 --- a/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensive.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 25 VUs during first minute - { target: 25, duration: "1m" }, - // Hold at 25 VUs for the next 1 minutes and 30 seconds - { target: 25, duration: "1m45s" }, - // Linearly ramp down from 25 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~3 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(10)<5000"], -// // Thresholds based on the custom metric we defined and use to track application failures -// "check_failure_rate": [ -// // Global failure rate should be less than 1% -// "rate<0.01", -// // Abort the test early if it climbs over 5% -// { threshold: "rate<=0.05", abortOnFail: true }, -// ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerCPUIntensive`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git 
a/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensiveWithSleeps.js b/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensiveWithSleeps.js deleted file mode 100644 index 158e4903..00000000 --- a/.ci/perf_tests/k6scripts/SyncHttpTriggerCPUIntensiveWithSleeps.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 25, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 25, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerCPUIntensiveWithSleeps`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/SyncHttpTriggerHtmlParser.js 
b/.ci/perf_tests/k6scripts/SyncHttpTriggerHtmlParser.js deleted file mode 100644 index a10ac626..00000000 --- a/.ci/perf_tests/k6scripts/SyncHttpTriggerHtmlParser.js +++ /dev/null @@ -1,50 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -const fileContents = open('./largish_body.html') - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 50, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 50, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(95)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let response = http.post(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerHtmlParser`, fileContents); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200, - "content is present": (r) => r.body.indexOf("StartTagCount") !== -1, - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git 
a/.ci/perf_tests/k6scripts/SyncHttpTriggerWithMixWorkloads.js b/.ci/perf_tests/k6scripts/SyncHttpTriggerWithMixWorkloads.js deleted file mode 100644 index e5f25f66..00000000 --- a/.ci/perf_tests/k6scripts/SyncHttpTriggerWithMixWorkloads.js +++ /dev/null @@ -1,47 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 50, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 30 seconds - { target: 50, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(50)<5000"], - // Thresholds based on the custom metric we defined and use to track application failures -// "check_failure_rate": [ -// // Global failure rate should be less than 1% -// "rate<0.01", -// // Abort the test early if it climbs over 5% -// { threshold: "rate<=0.05", abortOnFail: true }, -// ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerWithMixWorkloads`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git a/.ci/perf_tests/k6scripts/SyncHttpTriggerWithSyncRequests.js 
b/.ci/perf_tests/k6scripts/SyncHttpTriggerWithSyncRequests.js deleted file mode 100644 index 16aabe06..00000000 --- a/.ci/perf_tests/k6scripts/SyncHttpTriggerWithSyncRequests.js +++ /dev/null @@ -1,49 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 100 VUs during first minute - { target: 100, duration: "30s" }, - // Hold at 100 VUs for the next 1 minutes and 15 seconds - { target: 100, duration: "1m15s" }, - // Linearly ramp down from 100 to 0 50 VUs over the last 30 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~2 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 500ms - "http_req_duration": ["p(10)<5000"], - // Requests with the staticAsset tag should finish even faster -// "http_req_duration{staticAsset:yes}": ["p(99)<250"], -// // Thresholds based on the custom metric we defined and use to track application failures -// "check_failure_rate": [ -// // Global failure rate should be less than 1% -// "rate<0.01", -// // Abort the test early if it climbs over 5% -// { threshold: "rate<=0.05", abortOnFail: true }, -// ], - }, -}; - -// Main function -export default function () { - let response = http.get(`${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncHttpTriggerWithSyncRequests`); - - // check() returns false if any of the specified conditions fail - let checkRes = check(response, { - "status is 200": (r) => r.status === 200 - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} \ No newline at end of file diff --git 
a/.ci/perf_tests/k6scripts/SyncPutBlobAsBytesReturnHttpResponse.js b/.ci/perf_tests/k6scripts/SyncPutBlobAsBytesReturnHttpResponse.js deleted file mode 100644 index 7426e5f7..00000000 --- a/.ci/perf_tests/k6scripts/SyncPutBlobAsBytesReturnHttpResponse.js +++ /dev/null @@ -1,53 +0,0 @@ -import { check } from "k6"; -import { Rate } from "k6/metrics"; -import http from "k6/http"; -import { randomIntBetween } from "https://jslib.k6.io/k6-utils/1.0.0/index.js"; - -var HOSTNAME = __ENV.HOSTNAME || 'localhost'; -var PORT = __ENV.PORT || '80'; -var PROTOCOL = __ENV.PROTOCOL || (PORT === '80' ? 'http' : 'https'); - -// A custom metric to track failure rates -var failureRate = new Rate("check_failure_rate"); - -// Options -export let options = { - stages: [ - // Linearly ramp up from 1 to 50 VUs during first minute - { target: 50, duration: "1m" }, - // Hold at 50 VUs for the next 3 minutes and 45 seconds - { target: 50, duration: "3m45s" }, - // Linearly ramp down from 50 to 0 VUs over the last 15 seconds - { target: 0, duration: "15s" } - // Total execution time will be ~5 minutes - ], - thresholds: { - // We want the 95th percentile of all HTTP request durations to be less than 40s - "http_req_duration": ["p(95)<40000"], - // Thresholds based on the custom metric we defined and use to track application failures - "check_failure_rate": [ - // Global failure rate should be less than 1% - "rate<0.01", - // Abort the test early if it climbs over 5% - { threshold: "rate<=0.05", abortOnFail: true }, - ], - }, -}; - -// Main function -export default function () { - let content_size = 1024 * 1024 * 256; // 256 MB - let no_random_input = true; - let outfile = randomIntBetween(1,500000); - let url = `${PROTOCOL}://${HOSTNAME}:${PORT}/api/SyncPutBlobAsBytesReturnHttpResponse?content_size=${content_size}&no_random_input=${no_random_input}&outfile=${outfile}`; - let response = http.get(url); - - // check() returns false if any of the specified conditions fail - let checkRes = 
check(response, { - "status is 200": (r) => r.status === 200, - "content_size matches": (r) => r.json().content_size === content_size, - }); - - // We reverse the check() result since we want to count the failures - failureRate.add(!checkRes); -} diff --git a/.ci/perf_tests/run-perftests.sh b/.ci/perf_tests/run-perftests.sh deleted file mode 100644 index 2d117638..00000000 --- a/.ci/perf_tests/run-perftests.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -HOST=$1 -PORT=$2 -PERF_TESTS_LINK=$3 -TEST_TO_RUN=$4 -PROTOCOL=http - -runk6tests () { - PROTOCOL=$PROTOCOL HOSTNAME=$1 PORT=$2 ./k6 run --summary-export=test-summary.json -q $PERF_TESTS_LINK/$TEST_TO_RUN.js -} - -printresults () { - cat test-summary.json -} - -runk6tests "$HOST" "$PORT" -#printresults diff --git a/.github/workflows/ci_consumption_workflow.yml b/.github/workflows/ci_consumption_workflow.yml deleted file mode 100644 index 3a280cdb..00000000 --- a/.github/workflows/ci_consumption_workflow.yml +++ /dev/null @@ -1,71 +0,0 @@ -# This workflow will run all tests in tests/consumption_tests in Docker using a consumption image -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: CI Consumption E2E tests - -on: - workflow_dispatch: - inputs: - custom_image: - description: "Use a custom image to run consumption tests" - required: false - push: - branches: [ dev, main, release/* ] - pull_request: - branches: [ dev, main, release/* ] - -jobs: - build: - name: "Python Consumption CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ] - permissions: read-all - steps: - - name: Checkout code. 
- uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre - python -m pip install -U -e .[dev] - if [[ "${{ matrix.python-version }}" != "3.7" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2] - fi - python setup.py build - - name: Running 3.7 Tests - if: matrix.python-version == 3.7 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }} - run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests - - name: Running 3.8 Tests - if: matrix.python-version == 3.8 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }} - run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests - - name: Running 3.9 Tests - if: matrix.python-version == 3.9 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }} - run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests - - name: Running 3.10 Tests - if: matrix.python-version == 3.10 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - CUSTOM_IMAGE: ${{ github.event.inputs.custom_image }} - run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests - - name: Running 3.11 Tests - if: matrix.python-version == 3.11 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - CUSTOM_IMAGE: ${{ 
github.event.inputs.custom_image }} - run: python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests diff --git a/.github/workflows/ci_docker_con_workflow.yml b/.github/workflows/ci_docker_con_workflow.yml deleted file mode 100644 index 8ec29b3a..00000000 --- a/.github/workflows/ci_docker_con_workflow.yml +++ /dev/null @@ -1,92 +0,0 @@ -# This workflow will run all tests in endtoend/tests in a docker container using the latest consumption image - -name: CI Docker Consumption tests - -on: - workflow_dispatch: - schedule: - # Run everyday at 5 AM CST - - cron: "0 10 * * *" - -jobs: - build: - name: "Python Docker CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ] - permissions: read-all - env: - CONSUMPTION_DOCKER_TEST: "true" - - steps: - - name: Checkout code. - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python setup.py build - - name: Running 3.7 Tests - if: matrix.python-version == 3.7 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.8 Tests - if: matrix.python-version == 3.8 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - 
AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.9 Tests - if: matrix.python-version == 3.9 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.10 Tests - if: matrix.python-version == 3.10 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }} - run: | - python -m pytest --reruns 4 -vv 
--instafail tests/endtoend - - name: Running 3.11 Tests - if: matrix.python-version == 3.11 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend diff --git a/.github/workflows/ci_docker_custom_workflow.yml b/.github/workflows/ci_docker_custom_workflow.yml deleted file mode 100644 index 79b92b17..00000000 --- a/.github/workflows/ci_docker_custom_workflow.yml +++ /dev/null @@ -1,112 +0,0 @@ -# This workflow will run all tests in endtoend/tests in a docker container using custom dedicated or consumption image - - -name: CI Docker Custom tests - -on: - workflow_dispatch: - inputs: - image_name: - description: 'Image' - required: true - python_version: - description: 'Python Version' - required: true - type: choice - default: '3.11' - options: [ '3.7', '3.8', '3.9', '3.10', '3.11' ] - DEDICATED_DOCKER_TEST: - description: 'Is this Dedicated Image?' - required: true - type: choice - default: 'true' - options: [ 'true', 'false' ] - CONSUMPTION_DOCKER_TEST: - description: 'Is this Consumption Image?' 
- required: true - type: choice - default: 'false' - options: [ 'true', 'false' ] - -jobs: - build: - name: "Python Docker CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - permissions: read-all - env: - DEDICATED_DOCKER_TEST: ${{ github.event.inputs.DEDICATED_DOCKER_TEST }} - CONSUMPTION_DOCKER_TEST: ${{ github.event.inputs.CONSUMPTION_DOCKER_TEST }} - python_version: ${{ github.event.inputs.python_version }} - IMAGE_NAME: ${{ github.event.inputs.image_name }} - steps: - - name: Checkout code. - uses: actions/checkout@v2 - - name: Set up Python ${{ env.python_version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ env.python_version }} - - name: Install dependencies - run: | - python setup.py build - - name: Running 3.7 Tests - if: env.python_version == 3.7 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }} - run: | - python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.8 Tests - if: env.python_version == 3.8 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }} - AzureWebJobsEventGridTopicUri: ${{ 
secrets.LinuxEventGridTopicUriString38 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }} - run: | - python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.9 Tests - if: env.python_version == 3.9 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }} - run: | - python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.10 Tests - if: env.python_version == 3.10 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }} - run: | - python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.11 Tests - if: env.python_version == 3.11 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }} - AzureWebJobsEventHubConnectionString: ${{ 
secrets.LinuxEventHubConnectionString311 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }} - run: | - python -m pytest --dist loadfile --reruns 4 -vv --instafail tests/endtoend \ No newline at end of file diff --git a/.github/workflows/ci_docker_ded_workflow.yml b/.github/workflows/ci_docker_ded_workflow.yml deleted file mode 100644 index 1e117bf6..00000000 --- a/.github/workflows/ci_docker_ded_workflow.yml +++ /dev/null @@ -1,93 +0,0 @@ -# This workflow will run all tests in endtoend/tests in a docker container using the latest dedicated image - -name: CI Docker Dedicated tests - -on: - workflow_dispatch: - schedule: - # Run everyday at 4 AM CST - - cron: "0 9 * * *" - -jobs: - build: - name: "Python Docker CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ] - permissions: read-all - env: - DEDICATED_DOCKER_TEST: "true" - - steps: - - name: Checkout code. 
- uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python setup.py build - - name: Running 3.7 Tests - if: matrix.python-version == 3.7 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.8 Tests - if: matrix.python-version == 3.8 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.9 Tests - if: matrix.python-version == 3.9 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }} - 
AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.10 Tests - if: matrix.python-version == 3.10 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - - name: Running 3.11 Tests - if: matrix.python-version == 3.11 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }} - run: | - python -m pytest --reruns 4 -vv --instafail tests/endtoend - diff --git a/.github/workflows/ci_e2e_workflow.yml b/.github/workflows/ci_e2e_workflow.yml deleted file mode 100644 index 
0d97b962..00000000 --- a/.github/workflows/ci_e2e_workflow.yml +++ /dev/null @@ -1,161 +0,0 @@ -# This workflow will install Python dependencies and run end to end tests with single version of Python -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: CI E2E tests - -on: - workflow_dispatch: - inputs: - archive_webhost_logging: - description: "For debugging purposes, archive test webhost logs" - required: false - default: "false" - push: - branches: [dev, main, release/*] - pull_request: - branches: [dev, main, release/*] - schedule: - # Monday to Friday 3 AM CST build - # * is a special character in YAML so you have to quote this string - - cron: "0 8 * * 1,2,3,4,5" - -jobs: - build: - name: "Python E2E CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [3.7, 3.8, 3.9, "3.10", "3.11"] - # Currently runs FWPC E2E tests, classic E2E tests, and DefBindings E2E tests. - # To run tests from another script, add the script name to this matrix - test-type: [fwpc-e2e-tests, e2e-tests, deferred-bindings-e2e-tests] - permissions: read-all - steps: - - name: Checkout code. - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Set up Dotnet 8.0.x - uses: actions/setup-dotnet@v4 - with: - dotnet-version: "8.0.x" - - name: Install dependencies and the worker - run: | - retry() { - local -r -i max_attempts="$1"; shift - local -r cmd="$@" - local -i attempt_num=1 - until $cmd - do - if (( attempt_num == max_attempts )) - then - echo "Attempt $attempt_num failed and there are no more attempts left!" - return 1 - else - echo "Attempt $attempt_num failed! Trying again in $attempt_num seconds..." 
- sleep 1 - fi - done - } - - python -m pip install --upgrade pip - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre - python -m pip install -U -e .[dev] - - if [[ "${{ matrix.python-version }}" != "3.7" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2] - fi - if [[ "${{ matrix.python-version }}" != "3.7" && "${{ matrix.python-version }}" != "3.8" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-deferred-bindings] - fi - - # Retry a couple times to avoid certificate issue - retry 5 python setup.py build - retry 5 python setup.py webhost --branch-name=dev - retry 5 python setup.py extension - mkdir logs - - name: Grant execute permission - run: | - chmod +x .github/Scripts/${{ matrix.test-type }}.sh - - name: Running 3.7 ${{ matrix.test-type }} - if: matrix.python-version == 3.7 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString37 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - .github/Scripts/${{ matrix.test-type }}.sh - - name: Running 3.8 ${{ matrix.test-type }} - if: matrix.python-version == 3.8 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - AzureWebJobsCosmosDBConnectionString: ${{ 
secrets.LinuxCosmosDBConnectionString38 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - .github/Scripts/${{ matrix.test-type }}.sh - - name: Running 3.9 ${{ matrix.test-type }} - if: matrix.python-version == 3.9 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - .github/Scripts/${{ matrix.test-type }}.sh - - name: Running 3.10 ${{ matrix.test-type }} - if: matrix.python-version == 3.10 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }} - 
AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - .github/Scripts/${{ matrix.test-type }}.sh - - name: Running 3.11 ${{ matrix.test-type }} - if: matrix.python-version == 3.11 - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - .github/Scripts/${{ matrix.test-type }}.sh - - name: Codecov - uses: codecov/codecov-action@v3 - with: - file: ./coverage.xml # optional - flags: unittests # optional - name: codecov # optional - fail_ci_if_error: false # optional (default = false) - - name: Publish Logs to Artifact - if: failure() - uses: actions/upload-artifact@v4 - with: - name: Test WebHost Logs ${{ github.run_id }} ${{ matrix.python-version }} - path: logs/*.log - if-no-files-found: ignore \ No newline at end of file diff --git a/.github/workflows/ci_ut_workflow.yml b/.github/workflows/ci_ut_workflow.yml deleted file mode 100644 index 68414f73..00000000 --- a/.github/workflows/ci_ut_workflow.yml +++ /dev/null @@ -1,91 +0,0 @@ -# This workflow will install Python dependencies, run tests and lint with a single version of Python -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: CI Unit tests - -on: - workflow_dispatch: - inputs: - archive_webhost_logging: 
- description: "For debugging purposes, archive test webhost logs" - required: false - default: "false" - schedule: - # Monday to Thursday 3 AM CST build - # * is a special character in YAML so you have to quote this string - - cron: "0 8 * * 1,2,3,4" - push: - pull_request: - branches: [ dev, main, release/* ] - -jobs: - build: - name: "Python UT CI Run" - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - python-version: [ 3.7, 3.8, 3.9, "3.10", "3.11" ] - permissions: read-all - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - name: Set up Dotnet 8.0.x - uses: actions/setup-dotnet@v4 - with: - dotnet-version: "8.0.x" - - name: Install dependencies and the worker - run: | - retry() { - local -r -i max_attempts="$1"; shift - local -r cmd="$@" - local -i attempt_num=1 - until $cmd - do - if (( attempt_num == max_attempts )) - then - echo "Attempt $attempt_num failed and there are no more attempts left!" - return 1 - else - echo "Attempt $attempt_num failed! Trying again in $attempt_num seconds..." 
- sleep 1 - fi - done - } - - python -m pip install --upgrade pip - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre - - python -m pip install -U -e .[dev] - if [[ "${{ matrix.python-version }}" != "3.7" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2] - fi - - # Retry a couple times to avoid certificate issue - retry 5 python setup.py build - retry 5 python setup.py webhost --branch-name=dev - retry 5 python setup.py extension - mkdir logs - - name: Test with pytest - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} # needed for installing azure-functions-durable while running setup.py - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - run: | - python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests - - name: Codecov - uses: codecov/codecov-action@v3 - with: - file: ./coverage.xml # optional - flags: unittests # optional - name: codecov # optional - fail_ci_if_error: false # optional (default = false) - - name: Publish Logs to Artifact - if: failure() - uses: actions/upload-artifact@v4 - with: - name: Test WebHost Logs ${{ github.run_id }} ${{ matrix.python-version }} - path: logs/*.log - if-no-files-found: ignore diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml deleted file mode 100644 index ebcc8cae..00000000 --- a/.github/workflows/codeql-analysis.yml +++ /dev/null @@ -1,70 +0,0 @@ -# For most projects, this workflow file will not need changing; you simply need -# to commit it to your repository. -# -# You may wish to alter this file to override the set of languages analyzed, -# or to provide custom queries or build logic. 
-# -# ******** NOTE ******** -# We have attempted to detect the languages in your repository. Please check -# the `language` matrix defined below to confirm you have the correct set of -# supported CodeQL languages. -# -name: "CodeQL" - -on: - push: - branches: [ dev, v3.x-dev, main*, release* ] - pull_request: - # The branches below must be a subset of the branches above - branches: [ dev ] - schedule: - - cron: '25 11 * * 5' - -jobs: - analyze: - name: Analyze - runs-on: ubuntu-latest - permissions: - actions: read - contents: read - security-events: write - - strategy: - fail-fast: false - matrix: - language: [ 'python' ] - # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] - # Learn more about CodeQL language support at https://git.io/codeql-language-support - - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main - - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 - - # ℹ️ Command-line programs to run using the OS shell. 
- # 📚 https://git.io/JvXDl - - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language - - #- run: | - # make bootstrap - # make release - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/perf-testing-setup.yml b/.github/workflows/perf-testing-setup.yml deleted file mode 100644 index eff49090..00000000 --- a/.github/workflows/perf-testing-setup.yml +++ /dev/null @@ -1,137 +0,0 @@ -name: Throughput testing workflow - -on: - workflow_dispatch: - inputs: - profiling_sampling_rate: - description: 'Profiling sampling rate (tps)' - required: false - default: '500' - test_to_run: - description: 'List of perf tests to run' - required: false - default: SyncHelloWorld - report_format: - description: 'Format of profiling report' - type: choice - required: true - options: - - speedscope - - flamegraph - issue_comment: - types: [created] -env: - TESTS_DIR_PATH: ".ci/perf_tests/k6scripts/" - PYTHON_VERSION: "3.10" - PYTHON_FUNCTION_PROFILING_STORAGE_ACCT: "azpyfuncpipelinestorage" - PORT: 8000 - -jobs: - build: - if: ${{ github.event_name == 'workflow_dispatch' || github.event.issue.pull_request != null && contains(github.event.comment.body, '/profile') }} - runs-on: ubuntu-latest - permissions: read-all - strategy: - fail-fast: false - matrix: - test_to_run: ['${{ github.event.inputs.test_to_run }}'] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@v2 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Setup k6 for throughput testing - run: | - cd $GITHUB_WORKSPACE - curl https://github.com/loadimpact/k6/releases/download/v0.28.0/k6-v0.28.0-linux64.tar.gz -L | tar xvz --strip-components 1 - chmod 755 ./k6 - ./k6 version - - name: Install dependencies and the worker - run: | - python -m pip install -q --upgrade pip - 
python -m pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U -e .[dev] - python -m pip install -q --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre - python setup.py build - python setup.py webhost --branch-name=dev - python setup.py extension - - name: Build and Run the Docker image - run: | - echo "Building Docker image with Python version ${{ env.PYTHON_VERSION }}" - docker build --build-arg PYTHON_VERSION=${{ env.PYTHON_VERSION }} --file .ci/perf_tests/dockerfiles/perf_tests.Dockerfile --tag perfimage:latest . - - echo "Running Docker container..." - container_id=$(docker run -d --privileged --env FUNCTIONS_WORKER_RUNTIME_VERSION=${{ env.PYTHON_VERSION }} -p ${PORT}:80 -v $GITHUB_WORKSPACE/azure_functions_worker:/azure-functions-host/workers/python/${{ env.PYTHON_VERSION }}/LINUX/X64/azure_functions_worker perfimage:latest) - sleep 10 # host needs some time to start. - echo "Container ID is $container_id" - - echo "Fetching Docker container logs..." 
- docker logs $container_id - worker_pid=$(docker exec $container_id sh -c "ps aux | grep '[p]ython'" | awk '{print $2}') - echo "Python worker process id is $worker_pid" - - echo "container_id=$container_id" >> $GITHUB_ENV - echo "worker_pid=$worker_pid" >> $GITHUB_ENV - - - name: Validate if the functions are now running - run: | - curl --get http://localhost:${PORT}/api/${{ matrix.test_to_run }} - - - name: Start py-spy in the background - run: | - docker exec $container_id sh -c "pip install py-spy" - docker exec $container_id sh -c "mkdir /home/profiling_reports" - profiling_sampling_rate=${{ github.event.inputs.profiling_sampling_rate }} - report_format=${{ github.event.inputs.report_format }} - if [ "$report_format" == "flamegraph" ]; then - report_name="${{ github.run_id }}.svg" - elif [ "$report_format" == "speedscope" ]; then - report_name="${{ github.run_id }}.speedscope.json" - else - echo "Unsupported report format: $report_format" - exit 1 - fi - docker exec -d $container_id sh -c "RUST_BACKTRACE=1 py-spy record -p $worker_pid -o /home/profiling_reports/$report_name -f $report_format --idle --nonblocking --rate $profiling_sampling_rate > /home/site/wwwroot/py-spy.log 2>&1 &" - sleep 2 # Give it a moment to start - py_spy_id=$(docker exec $container_id sh -c "ps aux | grep '[p]y-spy record'" | awk '{print $2}') - - echo "py_spy_id=$py_spy_id" >> $GITHUB_ENV - echo "report_name=$report_name" >> $GITHUB_ENV - - - name: Run Throughput tests - run: | - chmod 755 .ci/perf_tests/run-perftests.sh - .ci/perf_tests/run-perftests.sh localhost $PORT ${{ env.TESTS_DIR_PATH }} ${{ matrix.test_to_run }} - - - name: Stop profiling and generate report - run: | - echo "Tests completed, terminating py-spy..." 
- docker exec $container_id cat /home/site/wwwroot/py-spy.log - docker exec $container_id sh -c "kill -2 $py_spy_id" - sleep 2 - mkdir profiling_reports - chmod 777 profiling_reports - docker cp $container_id:/home/profiling_reports/$report_name profiling_reports - - name: Upload SVG to Azure Blob Storage - uses: bacongobbler/azure-blob-storage-upload@v3.0.0 - with: - source_dir: 'profiling_reports' # Directory containing the $report_name file - container_name: 'profiling' - connection_string: ${{ secrets.AZURE_STORAGE_CONNECTION_STRING }} - sync: 'false' - - - name: Output Blob URL - run: | - blob_url="https://${{ env.PYTHON_FUNCTION_PROFILING_STORAGE_ACCT }}.blob.core.windows.net/profiling/${{ env.report_name }}" - echo "You can view the Blob at: $blob_url" - - - name: Upload profiling result to artifact - uses: actions/upload-artifact@v2 - with: - name: py-spy-output - path: 'profiling_reports/${{ env.report_name }}' - - - name: Create Artifact Link - run: | - echo "You can download the SVG artifact from the Actions run page." 
- echo "Link to the Actions run page: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}" diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index 7324df62..00000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,217 +0,0 @@ -name: $(Date:yyyyMMdd).$(Rev:r) - -trigger: -- release/3.* -- release/4.* -- dev - -variables: - patchBuildNumberForDev: $(Build.BuildNumber) - PROD_V4_WORKER_PY: 'python/prodV4/worker.py' - -jobs: -- job: Build_WINDOWS_X64 - pool: - name: '1ES-Hosted-AzFunc' #MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu - demands: - - ImageOverride -equals MMS2019TLS - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: $(PROD_V4_WORKER_PY) - Python38V4: - pythonVersion: '3.8' - workerPath: $(PROD_V4_WORKER_PY) - Python39V4: - pythonVersion: '3.9' - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: '3.10' - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: '3.11' - workerPath: $(PROD_V4_WORKER_PY) - steps: - - template: pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x64' - artifactName: '$(pythonVersion)_WINDOWS_X64' -- job: Build_WINDOWS_X86 - pool: - name: '1ES-Hosted-AzFunc' #MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu - demands: - - ImageOverride -equals MMS2019TLS - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: $(PROD_V4_WORKER_PY) - Python38V4: - pythonVersion: '3.8' - workerPath: $(PROD_V4_WORKER_PY) - Python39V4: - pythonVersion: '3.9' - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: '3.10' - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: '3.11' - workerPath: $(PROD_V4_WORKER_PY) - steps: - - template: pack/templates/win_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - architecture: 'x86' - artifactName: '$(pythonVersion)_WINDOWS_x86' -- 
job: Build_LINUX_X64 - pool: - name: '1ES-Hosted-AzFunc' # MMS2019TLS for Windows2019 or MMSUbuntu20.04TLS for ubuntu - demands: - - ImageOverride -equals MMSUbuntu20.04TLS - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: $(PROD_V4_WORKER_PY) - Python38V4: - pythonVersion: '3.8' - workerPath: $(PROD_V4_WORKER_PY) - Python39V4: - pythonVersion: '3.9' - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: '3.10' - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: '3.11' - workerPath: $(PROD_V4_WORKER_PY) - steps: - - template: pack/templates/nix_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_LINUX_X64' -- job: Build_OSX_X64 - pool: - vmImage: 'macOS-latest' - strategy: - matrix: - Python37V4: - pythonVersion: '3.7' - workerPath: $(PROD_V4_WORKER_PY) - Python38V4: - pythonVersion: '3.8' - workerPath: $(PROD_V4_WORKER_PY) - Python39V4: - pythonVersion: '3.9' - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: '3.10' - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: '3.11' - workerPath: $(PROD_V4_WORKER_PY) - steps: - - template: pack/templates/nix_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_X64' -- job: Build_OSX_ARM64 - pool: - vmImage: 'macOS-latest' - strategy: - matrix: - Python39V4: - pythonVersion: '3.9' - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: '3.10' - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: '3.11' - workerPath: $(PROD_V4_WORKER_PY) - steps: - - template: pack/templates/macos_64_env_gen.yml - parameters: - pythonVersion: '$(pythonVersion)' - workerPath: '$(workerPath)' - artifactName: '$(pythonVersion)_OSX_ARM64' - -- job: PackageWorkers - dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64'] - condition: 
or(startsWith(variables['Build.SourceBranch'], 'refs/heads/release/'), eq(variables['Build.SourceBranch'], 'refs/heads/dev'), eq(variables['GeneratePackage'], True)) - pool: - name: '1ES-Hosted-AzFunc' - demands: - - ImageOverride -equals MMS2019TLS - steps: - - bash: | - echo "Releasing from $BUILD_SOURCEBRANCHNAME" - sudo apt-get install -y jq - - if [[ $BUILD_SOURCEBRANCHNAME = 4\.* ]] - then - echo "Generating V4 Release Package for $BUILD_SOURCEBRANCHNAME" - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - WKVERSION="$BUILD_SOURCEBRANCHNAME" - elif [[ $BUILD_SOURCEBRANCHNAME = dev ]] - then - echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME" - VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g") - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - WKVERSION="$VERSION-$(patchBuildNumberForDev)" - else - # this is only to test nuget related workflow because we are setting nuspec here - echo "Generating Integration Test Package for $BUILD_SOURCEBRANCHNAME for testing purpose" - LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) - NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" - # Only required for Integration Test. Version number contains date (e.g. 
3.1.2.20211028-dev) - WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST" - echo "No Matching Release Tag For $BUILD_SOURCEBRANCH" - fi - - echo "##vso[task.setvariable variable=nuspec_path]$NUSPEC" - echo "##vso[task.setvariable variable=worker_version]$WKVERSION" - displayName: "Generate Worker NuGet Package for Release $BUILD_SOURCEBRANCHNAME" - - task: DownloadBuildArtifacts@0 - inputs: - buildType: 'current' - downloadType: 'specific' - downloadPath: '$(Build.SourcesDirectory)' - - task: ManifestGeneratorTask@0 - displayName: 'SBOM Generation Task' - inputs: - BuildDropPath: '$(Build.ArtifactStagingDirectory)' - BuildComponentPath: '$(Build.SourcesDirectory)' - Verbosity: 'Verbose' - - task: CopyFiles@2 - inputs: - SourceFolder: '$(Build.ArtifactStagingDirectory)' - Contents: '**' - TargetFolder: '$(Build.SourcesDirectory)' - - task: NuGetCommand@2 - inputs: - command: pack - packagesToPack: '$(nuspec_path)' - packDestination: $(Build.ArtifactStagingDirectory) - versioningScheme: 'byEnvVar' - versionEnvVar: WORKER_VERSION - - task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.ArtifactStagingDirectory)' - artifactName: 'PythonWorker' - - task: NuGetCommand@2 - condition: eq(variables['UPLOADPACKAGETOPRERELEASEFEED'], true) - inputs: - command: 'push' - packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg' - nuGetFeedType: 'internal' - publishVstsFeed: 'e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df' - allowPackageConflicts: true - displayName: '[Integration Test] Push NuGet package to the AzureFunctionsPreRelease feed' diff --git a/eng/ci/core-tools-tests.yml b/eng/ci/core-tools-tests.yml new file mode 100644 index 00000000..8ec3dfd6 --- /dev/null +++ b/eng/ci/core-tools-tests.yml @@ -0,0 +1,27 @@ +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git 
+ name: engineering + ref: refs/tags/release + +variables: + - template: ci/variables/build.yml@eng + - template: /ci/variables/cfs.yml@eng + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + + stages: + - stage: RunCoreToolsTests + jobs: + - template: /eng/templates/official/jobs/ci-core-tools-tests.yml@self diff --git a/eng/ci/custom-image-tests.yml b/eng/ci/custom-image-tests.yml new file mode 100644 index 00000000..667d58a8 --- /dev/null +++ b/eng/ci/custom-image-tests.yml @@ -0,0 +1,27 @@ +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +variables: + - template: ci/variables/build.yml@eng + - template: /ci/variables/cfs.yml@eng + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + + stages: + - stage: RunCustomDockerImageTests + jobs: + - template: /eng/templates/official/jobs/ci-custom-image-tests.yml@self diff --git a/eng/ci/docker-consumption-tests.yml b/eng/ci/docker-consumption-tests.yml new file mode 100644 index 00000000..631a97d9 --- /dev/null +++ b/eng/ci/docker-consumption-tests.yml @@ -0,0 +1,38 @@ +# CI only, does not trigger on PRs. 
+pr: none + +schedules: + - cron: "0 10 * * *" + displayName: Run everyday at 5 AM CST + branches: + include: + - dev + always: true + +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +variables: + - template: ci/variables/build.yml@eng + - template: /ci/variables/cfs.yml@eng + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + + stages: + - stage: RunDockerConsumptionTests + jobs: + - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self diff --git a/eng/ci/docker-dedicated-tests.yml b/eng/ci/docker-dedicated-tests.yml new file mode 100644 index 00000000..ca3edd22 --- /dev/null +++ b/eng/ci/docker-dedicated-tests.yml @@ -0,0 +1,38 @@ +# CI only, does not trigger on PRs. +pr: none + +schedules: + - cron: "0 11 * * *" + displayName: Run everyday at 6 AM CST + branches: + include: + - dev + always: true + +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +variables: + - template: ci/variables/build.yml@eng + - template: /ci/variables/cfs.yml@eng + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + + stages: + - stage: RunDockerDedicatedTests + jobs: + - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self diff --git a/eng/ci/integration-tests.yml b/eng/ci/integration-tests.yml new file mode 100644 index 00000000..420a27ec --- /dev/null +++ b/eng/ci/integration-tests.yml @@ -0,0 +1,49 @@ +trigger: none # ensure this is not ran as a CI build + +pr: + branches: + include: + - dev + - release/* + 
+schedules: + - cron: "0 8 * * 1,2,3,4,5" + displayName: Monday to Friday 3 AM CST build + branches: + include: + - dev + always: true + +resources: + repositories: + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release + - repository: eng + type: git + name: engineering + ref: refs/tags/release + +variables: + - template: ci/variables/build.yml@eng + - template: /ci/variables/cfs.yml@eng + +extends: + template: v1/1ES.Unofficial.PipelineTemplate.yml@1es + parameters: + pool: + name: 1es-pool-azfunc + image: 1es-windows-2022 + os: windows + + stages: + - stage: RunE2ETests + jobs: + - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self + - stage: RunLCTests + jobs: + - template: /eng/templates/official/jobs/ci-lc-tests.yml@self + - stage: RunUnitTests + jobs: + - template: /eng/templates/jobs/ci-unit-tests.yml@self diff --git a/eng/ci/official-build.yml b/eng/ci/official-build.yml index bb6afa21..11e7bf65 100644 --- a/eng/ci/official-build.yml +++ b/eng/ci/official-build.yml @@ -42,9 +42,25 @@ extends: stages: - stage: Build jobs: - - template: /eng/ci/templates/official/jobs/build-artifacts.yml@self + - template: /eng/templates/official/jobs/build-artifacts.yml@self - - stage: RunTests + - stage: RunE2ETests dependsOn: Build jobs: - - template: /eng/ci/templates/official/jobs/ci-e2e-tests.yml@self \ No newline at end of file + - template: /eng/templates/official/jobs/ci-e2e-tests.yml@self + - stage: RunUnitTests + dependsOn: Build + jobs: + - template: /eng/templates/jobs/ci-unit-tests.yml@self + - stage: RunDockerConsumptionTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-docker-consumption-tests.yml@self + - stage: RunDockerDedicatedTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-docker-dedicated-tests.yml@self + - stage: RunLinuxConsumptionTests + dependsOn: Build + jobs: + - template: /eng/templates/official/jobs/ci-lc-tests.yml@self diff 
--git a/eng/ci/public-build.yml b/eng/ci/public-build.yml index 929634b6..54ce97b2 100644 --- a/eng/ci/public-build.yml +++ b/eng/ci/public-build.yml @@ -2,27 +2,27 @@ trigger: batch: true branches: include: - - dev + - dev pr: branches: include: - - dev + - dev schedules: - - cron: '0 0 * * MON' - displayName: At 12:00 AM, only on Monday - branches: - include: - - dev - always: true +- cron: '0 0 * * MON' + displayName: At 12:00 AM, only on Monday + branches: + include: + - dev + always: true resources: repositories: - - repository: 1es - type: git - name: 1ESPipelineTemplates/1ESPipelineTemplates - ref: refs/tags/release + - repository: 1es + type: git + name: 1ESPipelineTemplates/1ESPipelineTemplates + ref: refs/tags/release extends: template: v1/1ES.Unofficial.PipelineTemplate.yml@1es @@ -31,8 +31,18 @@ extends: name: 1es-pool-azfunc-public image: 1es-windows-2022 os: windows + parameters: + sdl: + codeql: + compiled: + enabled: true # still only runs for default branch + runSourceLanguagesInSourceAnalysis: true stages: - - stage: Build - jobs: - - template: /eng/ci/templates/jobs/build.yml@self \ No newline at end of file + - stage: Build + jobs: + - template: /eng/templates/jobs/build.yml@self + - stage: RunUnitTests + dependsOn: Build + jobs: + - template: /eng/templates/jobs/ci-unit-tests.yml@self \ No newline at end of file diff --git a/eng/templates/jobs/build.yml b/eng/templates/jobs/build.yml index dadd88fb..1c96122f 100644 --- a/eng/templates/jobs/build.yml +++ b/eng/templates/jobs/build.yml @@ -4,8 +4,8 @@ jobs: pool: name: 1es-pool-azfunc-public - image: 1es-windows-2022 - os: windows + image: 1es-ubuntu-22.04 + os: linux steps: - task: UsePythonVersion@0 diff --git a/eng/templates/jobs/ci-unit-tests.yml b/eng/templates/jobs/ci-unit-tests.yml new file mode 100644 index 00000000..314503d3 --- /dev/null +++ b/eng/templates/jobs/ci-unit-tests.yml @@ -0,0 +1,42 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Unit Tests" + + strategy: + matrix: 
+ Python37: + PYTHON_VERSION: '3.7' + Python38: + PYTHON_VERSION: '3.8' + Python39: + PYTHON_VERSION: '3.9' + Python310: + PYTHON_VERSION: '3.10' + Python311: + PYTHON_VERSION: '3.11' + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + - task: UseDotNet@2 + displayName: 'Install .NET 8' + inputs: + version: 8.0.x + - bash: | + python -m pip install --upgrade pip + python -m pip install -U azure-functions --pre + + python -m pip install -U -e .[dev] + if [[ $(PYTHON_VERSION) != "3.7" ]]; then + python -m pip install --pre -U -e .[test-http-v2] + fi + + python setup.py build + python setup.py webhost --branch-name=dev + python setup.py extension + displayName: "Install dependencies" + - bash: | + python -m pytest -q -n auto --dist loadfile --reruns 4 --instafail --cov=./azure_functions_worker --cov-report xml --cov-branch tests/unittests + displayName: "Running $(PYTHON_VERSION) Unit Tests" + \ No newline at end of file diff --git a/eng/templates/official/jobs/build-artifacts.yml b/eng/templates/official/jobs/build-artifacts.yml index 0d9c4164..60282b55 100644 --- a/eng/templates/official/jobs/build-artifacts.yml +++ b/eng/templates/official/jobs/build-artifacts.yml @@ -1,80 +1,232 @@ jobs: - - job: "Build" - displayName: "Build python worker" +- job: Build_WINDOWS_X64 + pool: + name: 1es-pool-azfunc-public + image: 1es-windows-2022 + os: windows + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory) + artifactName: 
"$(pythonVersion)_WINDOWS_X64" + steps: + - template: ../../../../pack/templates/win_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + architecture: 'x64' + artifactName: '$(pythonVersion)_WINDOWS_X64' +- job: Build_WINDOWS_X86 + pool: + name: 1es-pool-azfunc-public + image: 1es-windows-2022 + os: windows + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory) + artifactName: "$(pythonVersion)_WINDOWS_X86" + steps: + - template: ../../../../pack/templates/win_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + architecture: 'x86' + artifactName: '$(pythonVersion)_WINDOWS_x86' +- job: Build_LINUX_X64 + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory) + artifactName: "$(pythonVersion)_LINUX_X64" + steps: + - template: ../../../../pack/templates/nix_env_gen.yml + parameters: + pythonVersion: 
'$(pythonVersion)' + workerPath: '$(workerPath)' + artifactName: '$(pythonVersion)_LINUX_X64' +- job: Build_OSX_X64 + pool: + name: Azure Pipelines + image: macOS-latest + os: macOS + strategy: + matrix: + Python37V4: + pythonVersion: '3.7' + workerPath: 'python/prodV4/worker.py' + Python38V4: + pythonVersion: '3.8' + workerPath: 'python/prodV4/worker.py' + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory) + artifactName: "$(pythonVersion)_OSX_X64" + steps: + - template: ../../../../pack/templates/nix_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + artifactName: '$(pythonVersion)_OSX_X64' +- job: Build_OSX_ARM64 + pool: + name: Azure Pipelines + image: macOS-latest + os: macOS + strategy: + matrix: + Python39V4: + pythonVersion: '3.9' + workerPath: 'python/prodV4/worker.py' + Python310V4: + pythonVersion: '3.10' + workerPath: 'python/prodV4/worker.py' + Python311V4: + pythonVersion: '3.11' + workerPath: 'python/prodV4/worker.py' + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: + - output: pipelineArtifact + targetPath: $(Build.SourcesDirectory) + artifactName: "$(pythonVersion)_OSX_ARM64" + steps: + - template: ../../../../pack/templates/macos_64_env_gen.yml + parameters: + pythonVersion: '$(pythonVersion)' + workerPath: '$(workerPath)' + artifactName: '$(pythonVersion)_OSX_ARM64' - templateContext: +- job: PackageWorkers + dependsOn: ['Build_WINDOWS_X64', 'Build_WINDOWS_X86', 'Build_LINUX_X64', 'Build_OSX_X64', 'Build_OSX_ARM64'] + templateContext: + outputParentDirectory: $(Build.ArtifactStagingDirectory) + outputs: - - output: pipelineArtifact - 
targetPath: $(Build.ArtifactStagingDirectory) - artifactName: "drop" - output: nuget condition: and(succeeded(), eq(variables['Build.SourceBranch'], 'refs/heads/dev'), eq(variables['UPLOADPACKAGETOPRERELEASEFEED'], true)) useDotNetTask: false - packagesToPush: "$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg" + packagesToPush: '$(Build.ArtifactStagingDirectory)/**/*.nupkg;!$(Build.ArtifactStagingDirectory)/**/*.symbols.nupkg' packageParentPath: "$(Build.ArtifactStagingDirectory)" publishVstsFeed: "e6a70c92-4128-439f-8012-382fe78d6396/f37f760c-aebd-443e-9714-ce725cd427df" nuGetFeedType: "internal" allowPackageConflicts: true + steps: + - bash: | + echo "Releasing from $BUILD_SOURCEBRANCHNAME" + sudo apt-get install -y jq - pool: - name: 1es-pool-azfunc - image: 1es-windows-2022 - os: windows + if [[ $BUILD_SOURCEBRANCHNAME = 4\.* ]] + then + echo "Generating V4 Release Package for $BUILD_SOURCEBRANCHNAME" + NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" + WKVERSION="$BUILD_SOURCEBRANCHNAME" + elif [[ $BUILD_SOURCEBRANCHNAME = dev ]] + then + echo "Generating V4 Integration Test Package for $BUILD_SOURCEBRANCHNAME" + VERSION=$(cat azure_functions_worker/version.py | tail -1 | cut -d' ' -f3 | sed "s/'//g") + NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" + WKVERSION="$VERSION-$(Build.BuildNumber)" + else + # this is only to test nuget related workflow because we are setting nuspec here + echo "Generating Integration Test Package for $BUILD_SOURCEBRANCHNAME for testing purpose" + LATEST_TAG=$(curl https://api.github.com/repos/Azure/azure-functions-python-worker/tags -s | jq '.[0].name' | sed 's/\"//g' | cut -d'.' -f-2) + NUSPEC="pack\Microsoft.Azure.Functions.V4.PythonWorker.nuspec" + # Only required for Integration Test. Version number contains date (e.g. 
3.1.2.20211028-dev) + WKVERSION="3.$LATEST_TAG-$(BUILD_BUILDID)-TEST" + echo "No Matching Release Tag For $BUILD_SOURCEBRANCH" + fi - variables: - ${{ if contains(variables['Build.SourceBranch'], '/tags/' ) }}: - isTagTemp: true - isTag: $[variables.isTagTemp] - - strategy: - matrix: - Python37V4: - pythonVersion: "3.7" - workerPath: $(PROD_V4_WORKER_PY) - Python38V4: - pythonVersion: "3.8" - workerPath: $(PROD_V4_WORKER_PY) - Python39V4: - pythonVersion: "3.9" - workerPath: $(PROD_V4_WORKER_PY) - Python310V4: - pythonVersion: "3.10" - workerPath: $(PROD_V4_WORKER_PY) - Python311V4: - pythonVersion: "3.11" - workerPath: $(PROD_V4_WORKER_PY) - - steps: - - template: pack/templates/win_env_gen.yml - displayName: "Build Windows x64" - parameters: - pythonVersion: "$(pythonVersion)" - workerPath: "$(workerPath)" - architecture: "x64" - artifactName: "$(pythonVersion)_WINDOWS_X64" - - template: pack/templates/win_env_gen.yml - displayName: "Build Windows x86" - parameters: - pythonVersion: "$(pythonVersion)" - workerPath: "$(workerPath)" - architecture: "x86" - artifactName: "$(pythonVersion)_WINDOWS_x86" - - template: pack/templates/nix_env_gen.yml - displayName: "Build Linux x64" - parameters: - pythonVersion: "$(pythonVersion)" - workerPath: "$(workerPath)" - artifactName: "$(pythonVersion)_LINUX_X64" - - template: pack/templates/nix_env_gen.yml - displayName: "Build OSX X64" - parameters: - pythonVersion: "$(pythonVersion)" - workerPath: "$(workerPath)" - artifactName: "$(pythonVersion)_OSX_X64" - - template: pack/templates/macos_64_env_gen.yml - displayName: "Build OSX ARM64" - parameters: - pythonVersion: "$(pythonVersion)" - workerPath: "$(workerPath)" - artifactName: "$(pythonVersion)_OSX_ARM64" \ No newline at end of file + echo "##vso[task.setvariable variable=nuspec_path]$NUSPEC" + echo "##vso[task.setvariable variable=worker_version]$WKVERSION" + displayName: "Generate Worker NuGet Package for Release $BUILD_SOURCEBRANCHNAME" + - task: 
DownloadPipelineArtifact@2 + inputs: + buildType: 'current' + targetPath: '$(Build.SourcesDirectory)' + - task: ManifestGeneratorTask@0 + displayName: 'SBOM Generation Task' + inputs: + BuildDropPath: '$(Build.ArtifactStagingDirectory)' + BuildComponentPath: '$(Build.SourcesDirectory)' + Verbosity: 'Verbose' + - task: CopyFiles@2 + inputs: + SourceFolder: '$(Build.ArtifactStagingDirectory)' + Contents: '**' + TargetFolder: '$(Build.SourcesDirectory)' + - task: NuGetCommand@2 + inputs: + command: pack + packagesToPack: '$(nuspec_path)' + packDestination: $(Build.ArtifactStagingDirectory) + versioningScheme: 'byEnvVar' + versionEnvVar: WORKER_VERSION diff --git a/eng/templates/official/jobs/ci-core-tools-tests.yml b/eng/templates/official/jobs/ci-core-tools-tests.yml new file mode 100644 index 00000000..3e8a9b62 --- /dev/null +++ b/eng/templates/official/jobs/ci-core-tools-tests.yml @@ -0,0 +1,35 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Core Tools E2E Tests" + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + steps: + - task: UsePythonVersion@0 + displayName: 'Install Python' + inputs: + versionSpec: "3.10" + addToPath: true + - task: UseDotNet@2 + displayName: 'Install DotNet 3' + inputs: + packageType: 'sdk' + version: "3.1.x" + - task: UseDotNet@2 + displayName: 'Install DotNet 6' + inputs: + packageType: 'sdk' + version: "6.x" + - pwsh: '$(Build.SourcesDirectory)/.ci/e2e_integration_test/start-e2e.ps1' + env: + AzureWebJobsStorage: $(LinuxStorageConnectionString311) + AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311) + AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311) + AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311) + AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311) + AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311) + AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311) + displayName: 
'Running Python Language Worker E2E Tests' diff --git a/eng/templates/official/jobs/ci-custom-image-tests.yml b/eng/templates/official/jobs/ci-custom-image-tests.yml new file mode 100644 index 00000000..71897506 --- /dev/null +++ b/eng/templates/official/jobs/ci-custom-image-tests.yml @@ -0,0 +1,37 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Docker Custom Tests" + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: $(CUSTOM_PYTHON_VERSION) + - bash: | + python -m pip install -U -e .[dev] + if [[ $(CUSTOM_PYTHON_VERSION) != "3.7" ]]; then + python -m pip install --pre -U -e .[test-http-v2] + fi + if [[ $(CUSTOM_PYTHON_VERSION) != "3.7" && $(CUSTOM_PYTHON_VERSION) != "3.8" ]]; then + python -m pip install --pre -U -e .[test-deferred-bindings] + fi + python setup.py build + displayName: 'Install dependencies' + - bash: | + python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests + env: + DEDICATED_DOCKER_TEST: $(CUSTOM_DED_IMAGE) + CONSUMPTION_DOCKER_TEST: $(CUSTOM_CON_IMAGE) + IMAGE_NAME: $(CUSTOM_IMAGE_NAME) + AzureWebJobsStorage: $(LinuxStorageConnectionString311) + AzureWebJobsCosmosDBConnectionString: $(LinuxCosmosDBConnectionString311) + AzureWebJobsEventHubConnectionString: $(LinuxEventHubConnectionString311) + AzureWebJobsServiceBusConnectionString: $(LinuxServiceBusConnectionString311) + AzureWebJobsSqlConnectionString: $(LinuxSqlConnectionString311) + AzureWebJobsEventGridTopicUri: $(LinuxEventGridTopicUriString311) + AzureWebJobsEventGridConnectionKey: $(LinuxEventGridConnectionKeyString311) + displayName: "Running Python DockerCustom tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-docker-consumption-tests.yml b/eng/templates/official/jobs/ci-docker-consumption-tests.yml new file mode 100644 index 00000000..c769bfd4 --- /dev/null +++ 
b/eng/templates/official/jobs/ci-docker-consumption-tests.yml @@ -0,0 +1,72 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Docker Consumption Tests" + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + strategy: + matrix: + Python38: + PYTHON_VERSION: '3.8' + STORAGE_CONNECTION: $(LinuxStorageConnectionString38) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) + SQL_CONNECTION: $(LinuxSqlConnectionString38) + EVENTGRID_URI: $(LinuxEventGridTopicUriString38) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) + Python39: + PYTHON_VERSION: '3.9' + STORAGE_CONNECTION: $(LinuxStorageConnectionString39) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) + SQL_CONNECTION: $(LinuxSqlConnectionString39) + EVENTGRID_URI: $(LinuxEventGridTopicUriString39) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) + Python310: + PYTHON_VERSION: '3.10' + STORAGE_CONNECTION: $(LinuxStorageConnectionString310) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) + SQL_CONNECTION: $(LinuxSqlConnectionString310) + EVENTGRID_URI: $(LinuxEventGridTopicUriString310) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) + Python311: + PYTHON_VERSION: '3.11' + STORAGE_CONNECTION: $(LinuxStorageConnectionString311) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) + SQL_CONNECTION: $(LinuxSqlConnectionString311) + EVENTGRID_URI: $(LinuxEventGridTopicUriString311) + EVENTGRID_CONNECTION: 
$(LinuxEventGridConnectionKeyString311) + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + - bash: | + python -m pip install -U -e .[dev] + python -m pip install --pre -U -e .[test-http-v2] + if [[ $(PYTHON_VERSION) != "3.8" ]]; then + python -m pip install --pre -U -e .[test-deferred-bindings] + fi + python setup.py build + displayName: 'Install dependencies' + - bash: | + python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests + env: + CONSUMPTION_DOCKER_TEST: "true" + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + displayName: "Running $(PYTHON_VERSION) Docker Consumption tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-docker-dedicated-tests.yml b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml new file mode 100644 index 00000000..f1374c8d --- /dev/null +++ b/eng/templates/official/jobs/ci-docker-dedicated-tests.yml @@ -0,0 +1,72 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Docker Dedicated Tests" + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + strategy: + matrix: + Python38: + PYTHON_VERSION: '3.8' + STORAGE_CONNECTION: $(LinuxStorageConnectionString38) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) + SQL_CONNECTION: $(LinuxSqlConnectionString38) + EVENTGRID_URI: $(LinuxEventGridTopicUriString38) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) + Python39: + 
PYTHON_VERSION: '3.9' + STORAGE_CONNECTION: $(LinuxStorageConnectionString39) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) + SQL_CONNECTION: $(LinuxSqlConnectionString39) + EVENTGRID_URI: $(LinuxEventGridTopicUriString39) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) + Python310: + PYTHON_VERSION: '3.10' + STORAGE_CONNECTION: $(LinuxStorageConnectionString310) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) + SQL_CONNECTION: $(LinuxSqlConnectionString310) + EVENTGRID_URI: $(LinuxEventGridTopicUriString310) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) + Python311: + PYTHON_VERSION: '3.11' + STORAGE_CONNECTION: $(LinuxStorageConnectionString311) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) + SQL_CONNECTION: $(LinuxSqlConnectionString311) + EVENTGRID_URI: $(LinuxEventGridTopicUriString311) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + - bash: | + python -m pip install -U -e .[dev] + python -m pip install --pre -U -e .[test-http-v2] + if [[ $(PYTHON_VERSION) != "3.8" ]]; then + python -m pip install --pre -U -e .[test-deferred-bindings] + fi + python setup.py build + displayName: 'Install dependencies' + - bash: | + python -m pytest --reruns 4 -vv --instafail tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests + env: + DEDICATED_DOCKER_TEST: "true" + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: 
$(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + displayName: "Running $(PYTHON_VERSION) Docker Dedicated tests" \ No newline at end of file diff --git a/eng/templates/official/jobs/ci-e2e-tests.yml b/eng/templates/official/jobs/ci-e2e-tests.yml index 3ebdaca6..a2d34d21 100644 --- a/eng/templates/official/jobs/ci-e2e-tests.yml +++ b/eng/templates/official/jobs/ci-e2e-tests.yml @@ -9,51 +9,51 @@ jobs: strategy: matrix: - python-37-fwpc: + Python37: PYTHON_VERSION: '3.7' - TEST_TYPE: 'fwpc-e2e-tests' - python-37-e2e: - PYTHON_VERSION: '3.7' - TEST_TYPE: 'e2e-tests' - python-37-deferred-bindings: - PYTHON_VERSION: '3.7' - TEST_TYPE: 'deferred-bindings-e2e-tests' - python-38-fwpc: - PYTHON_VERSION: '3.8' - TEST_TYPE: 'fwpc-e2e-tests' - python-38-e2e: + STORAGE_CONNECTION: $(LinuxStorageConnectionString37) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString37) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString37) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString37) + SQL_CONNECTION: $(LinuxSqlConnectionString37) + EVENTGRID_URI: $(LinuxEventGridTopicUriString37) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString37) + Python38: PYTHON_VERSION: '3.8' - TEST_TYPE: 'e2e-tests' - python-38-deferred-bindings: - PYTHON_VERSION: '3.8' - TEST_TYPE: 'deferred-bindings-e2e-tests' - python-39-fwpc: - PYTHON_VERSION: '3.9' - TEST_TYPE: 'fwpc-e2e-tests' - python-39-e2e: - PYTHON_VERSION: '3.9' - TEST_TYPE: 'e2e-tests' - python-39-deferred-bindings: + STORAGE_CONNECTION: $(LinuxStorageConnectionString38) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString38) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString38) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString38) + SQL_CONNECTION: $(LinuxSqlConnectionString38) + EVENTGRID_URI: 
$(LinuxEventGridTopicUriString38) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString38) + Python39: PYTHON_VERSION: '3.9' - TEST_TYPE: 'deferred-bindings-e2e-tests' - python-310-fwpc: + STORAGE_CONNECTION: $(LinuxStorageConnectionString39) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString39) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString39) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString39) + SQL_CONNECTION: $(LinuxSqlConnectionString39) + EVENTGRID_URI: $(LinuxEventGridTopicUriString39) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString39) + Python310: PYTHON_VERSION: '3.10' - TEST_TYPE: 'fwpc-e2e-tests' - python-310-e2e: - PYTHON_VERSION: '3.10' - TEST_TYPE: 'e2e-tests' - python-310-deferred-bindings: - PYTHON_VERSION: '3.10' - TEST_TYPE: 'deferred-bindings-e2e-tests' - python-311-fwpc: - PYTHON_VERSION: '3.11' - TEST_TYPE: 'fwpc-e2e-tests' - python-311-e2e: + STORAGE_CONNECTION: $(LinuxStorageConnectionString310) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString310) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString310) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString310) + SQL_CONNECTION: $(LinuxSqlConnectionString310) + EVENTGRID_URI: $(LinuxEventGridTopicUriString310) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString310) + Python311: PYTHON_VERSION: '3.11' - TEST_TYPE: 'e2e-tests' - python-311-deferred-bindings: - PYTHON_VERSION: '3.11' - TEST_TYPE: 'deferred-bindings-e2e-tests' + STORAGE_CONNECTION: $(LinuxStorageConnectionString311) + COSMOSDB_CONNECTION: $(LinuxCosmosDBConnectionString311) + EVENTHUB_CONNECTION: $(LinuxEventHubConnectionString311) + SERVICEBUS_CONNECTION: $(LinuxServiceBusConnectionString311) + SQL_CONNECTION: $(LinuxSqlConnectionString311) + EVENTGRID_URI: $(LinuxEventGridTopicUriString311) + EVENTGRID_CONNECTION: $(LinuxEventGridConnectionKeyString311) steps: - task: UsePythonVersion@0 inputs: @@ -64,92 +64,29 @@ jobs: version: 8.0.x - bash: | python -m pip install 
--upgrade pip - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple -U azure-functions --pre + python -m pip install -U azure-functions --pre python -m pip install -U -e .[dev] - if [[ "${{ PYTHON_VERSION }}" != "3.7" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-http-v2] + if [[ $(PYTHON_VERSION) != "3.7" ]]; then + python -m pip install --pre -U -e .[test-http-v2] fi - if [[ "${{ PYTHON_VERSION }}" != "3.7" && "${{ PYTHON_VERSION }}" != "3.8" ]]; then - python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple --pre -U -e .[test-deferred-bindings] + if [[ $(PYTHON_VERSION) != "3.7" && $(PYTHON_VERSION) != "3.8" ]]; then + python -m pip install --pre -U -e .[test-deferred-bindings] fi - # Retry a couple times to avoid certificate issue - retry 5 python setup.py build - retry 5 python setup.py webhost --branch-name=dev - retry 5 python setup.py extension + python setup.py build + python setup.py webhost --branch-name=dev + python setup.py extension mkdir logs displayName: 'Install dependencies and the worker' - bash: | - chmod +x .github/Scripts/${{ TEST_TYPE }}.sh - displayName: 'Grant execute permission' - - bash: | - if [[ "${{ PYTHON_VERSION }}" == "3.7" ]]; then - .github/Scripts/${{ TEST_TYPE }}.sh - fi - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString37 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString37 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString37 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString37 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString37 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString37 }} - AzureWebJobsEventGridConnectionKey: ${{ 
secrets.LinuxEventGridConnectionKeyString37 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - displayName: "Running 3.7 $(TEST_TYPE) tests" - - bash: | - if [[ "${{ PYTHON_VERSION }}" == "3.8" ]]; then - .github/Scripts/${{ TEST_TYPE }}.sh - fi - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString38 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString38 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString38 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString38 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString38 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString38 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString38 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - displayName: "Running 3.8 $(TEST_TYPE) tests" - - bash: | - if [[ "${{ PYTHON_VERSION }}" == "3.9" ]]; then - .github/Scripts/${{ TEST_TYPE }}.sh - fi - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString39 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString39 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString39 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString39 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString39 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString39 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString39 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - displayName: "Running 3.9 $(TEST_TYPE) tests" - - bash: | - if [[ "${{ PYTHON_VERSION }}" == "3.10" ]]; then - .github/Scripts/${{ TEST_TYPE }}.sh - fi - env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString310 }} - 
AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString310 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString310 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString310 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString310 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString310 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString310 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - displayName: "Running 3.10 $(TEST_TYPE) tests" - - bash: | - if [[ "${{ PYTHON_VERSION }}" == "3.11" ]]; then - .github/Scripts/${{ TEST_TYPE }}.sh - fi + python -m pytest -q -n auto --dist loadfile --reruns 4 --cov=./azure_functions_worker --cov-report xml --cov-branch --cov-append tests/endtoend tests/extension_tests/deferred_bindings_tests tests/extension_tests/http_v2_tests env: - AzureWebJobsStorage: ${{ secrets.LinuxStorageConnectionString311 }} - AzureWebJobsCosmosDBConnectionString: ${{ secrets.LinuxCosmosDBConnectionString311 }} - AzureWebJobsEventHubConnectionString: ${{ secrets.LinuxEventHubConnectionString311 }} - AzureWebJobsServiceBusConnectionString: ${{ secrets.LinuxServiceBusConnectionString311 }} - AzureWebJobsSqlConnectionString: ${{ secrets.LinuxSqlConnectionString311 }} - AzureWebJobsEventGridTopicUri: ${{ secrets.LinuxEventGridTopicUriString311 }} - AzureWebJobsEventGridConnectionKey: ${{ secrets.LinuxEventGridConnectionKeyString311 }} - ARCHIVE_WEBHOST_LOGS: ${{ github.event.inputs.archive_webhost_logging }} - displayName: "Running 3.11 $(TEST_TYPE) tests" + AzureWebJobsStorage: $(STORAGE_CONNECTION) + AzureWebJobsCosmosDBConnectionString: $(COSMOSDB_CONNECTION) + AzureWebJobsEventHubConnectionString: $(EVENTHUB_CONNECTION) + AzureWebJobsServiceBusConnectionString: $(SERVICEBUS_CONNECTION) + AzureWebJobsSqlConnectionString: $(SQL_CONNECTION) + 
AzureWebJobsEventGridTopicUri: $(EVENTGRID_URI) + AzureWebJobsEventGridConnectionKey: $(EVENTGRID_CONNECTION) + displayName: "Running $(PYTHON_VERSION) Python E2E Tests" diff --git a/eng/templates/official/jobs/ci-lc-tests.yml b/eng/templates/official/jobs/ci-lc-tests.yml new file mode 100644 index 00000000..c3642acc --- /dev/null +++ b/eng/templates/official/jobs/ci-lc-tests.yml @@ -0,0 +1,48 @@ +jobs: + - job: "TestPython" + displayName: "Run Python Linux Consumption Tests" + + pool: + name: 1es-pool-azfunc + image: 1es-ubuntu-22.04 + os: linux + + strategy: + matrix: + Python37: + PYTHON_VERSION: '3.7' + STORAGE_CONNECTION: $(LinuxStorageConnectionString37) + Python38: + PYTHON_VERSION: '3.8' + STORAGE_CONNECTION: $(LinuxStorageConnectionString38) + Python39: + PYTHON_VERSION: '3.9' + STORAGE_CONNECTION: $(LinuxStorageConnectionString39) + Python310: + PYTHON_VERSION: '3.10' + STORAGE_CONNECTION: $(LinuxStorageConnectionString310) + Python311: + PYTHON_VERSION: '3.11' + STORAGE_CONNECTION: $(LinuxStorageConnectionString311) + + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + - bash: | + python -m pip install --upgrade pip + python -m pip install -U azure-functions --pre + python -m pip install -U -e .[dev] + + if [[ $(PYTHON_VERSION) != "3.7" ]]; then + python -m pip install --pre -U -e .[test-http-v2] + fi + + python setup.py build + displayName: 'Install dependencies and the worker' + - bash: | + python -m pytest -n auto --dist loadfile -vv --reruns 4 --instafail tests/consumption_tests + env: + AzureWebJobsStorage: $(STORAGE_CONNECTION) + _DUMMY_CONT_KEY: $(_DUMMY_CONT_KEY) + displayName: "Running $(PYTHON_VERSION) Linux Consumption tests" \ No newline at end of file diff --git a/pack/templates/macos_64_env_gen.yml b/pack/templates/macos_64_env_gen.yml index 94c2260a..d871633e 100644 --- a/pack/templates/macos_64_env_gen.yml +++ b/pack/templates/macos_64_env_gen.yml @@ -26,7 +26,3 @@ steps: !grpc_tools/**/* !grpcio_tools*/* 
targetFolder: '$(Build.ArtifactStagingDirectory)' -- task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.ArtifactStagingDirectory)' - artifactName: ${{ parameters.artifactName }} diff --git a/pack/templates/nix_env_gen.yml b/pack/templates/nix_env_gen.yml index 80597e02..ae253ca3 100644 --- a/pack/templates/nix_env_gen.yml +++ b/pack/templates/nix_env_gen.yml @@ -26,7 +26,3 @@ steps: !grpc_tools/**/* !grpcio_tools*/* targetFolder: '$(Build.ArtifactStagingDirectory)' -- task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.ArtifactStagingDirectory)' - artifactName: ${{ parameters.artifactName }} diff --git a/pack/templates/win_env_gen.yml b/pack/templates/win_env_gen.yml index b0f838a7..1699d393 100644 --- a/pack/templates/win_env_gen.yml +++ b/pack/templates/win_env_gen.yml @@ -26,7 +26,3 @@ steps: !grpc_tools\**\* !grpcio_tools*\* targetFolder: '$(Build.ArtifactStagingDirectory)' -- task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.ArtifactStagingDirectory)' - artifactName: ${{ parameters.artifactName }} diff --git a/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py b/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py index 77b929ae..69d61981 100644 --- a/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py +++ b/tests/endtoend/http_functions/common_libs_functions/common_libs_functions_stein/function_app.py @@ -28,7 +28,7 @@ def dotenv_func(req: func.HttpRequest) -> func.HttpResponse: def numpy_func(req: func.HttpRequest) -> func.HttpResponse: logging.info('Python HTTP trigger function processed a request.') - res = "array: {}".format(np.array([1, 2], dtype=complex)) + res = "numpy version: {}".format(np.__version__) return func.HttpResponse(res) diff --git a/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py 
b/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py index 57d5d08e..9b7ffa60 100644 --- a/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py +++ b/tests/endtoend/http_functions/common_libs_functions/numpy_func/__init__.py @@ -9,6 +9,6 @@ def main(req: func.HttpRequest) -> func.HttpResponse: logging.info('Python HTTP trigger function processed a request.') - res = "array: {}".format(np.array([1, 2], dtype=complex)) + res = "numpy version: {}".format(np.__version__) return func.HttpResponse(res) diff --git a/tests/endtoend/test_http_functions.py b/tests/endtoend/test_http_functions.py index f6e6e3a7..2213e0c7 100644 --- a/tests/endtoend/test_http_functions.py +++ b/tests/endtoend/test_http_functions.py @@ -1,11 +1,7 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. -import concurrent import os -import sys import typing -import unittest -from concurrent.futures import ThreadPoolExecutor from unittest.mock import patch import requests @@ -155,9 +151,7 @@ def test_numpy(self): r = self.webhost.request('GET', 'numpy_func', timeout=REQUEST_TIMEOUT_SEC) - res = "array: [1.+0.j 2.+0.j]" - - self.assertEqual(r.content.decode("UTF-8"), res) + self.assertIn("numpy version", r.content.decode("UTF-8")) def test_requests(self): r = self.webhost.request('GET', 'requests_func', @@ -214,170 +208,18 @@ class TestHttpFunctionsWithInitIndexing(TestHttpFunctions): @classmethod def setUpClass(cls): + cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1' os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1" super().setUpClass() @classmethod def tearDownClass(cls): - # Remove the PYTHON_SCRIPT_FILE_NAME environment variable os.environ.pop(PYTHON_ENABLE_INIT_INDEXING) super().tearDownClass() - -@unittest.skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7") -class TestHttpFunctionsV2FastApiWithInitIndexing( - TestHttpFunctionsWithInitIndexing): @classmethod - def get_script_dir(cls): - 
return testutils.E2E_TESTS_FOLDER / 'http_functions' / \ - 'http_functions_v2' / \ - 'fastapi' - - @testutils.retryable_test(3, 5) - def test_return_streaming(self): - """Test if the return_streaming function returns a streaming - response""" - root_url = self.webhost._addr - streaming_url = f'{root_url}/api/return_streaming' - r = requests.get( - streaming_url, timeout=REQUEST_TIMEOUT_SEC, stream=True) - self.assertTrue(r.ok) - # Validate streaming content - expected_content = [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n'] - received_content = [] - for chunk in r.iter_content(chunk_size=5): - if chunk: - received_content.append(chunk) - self.assertEqual(received_content, expected_content) - - @testutils.retryable_test(3, 5) - def test_return_streaming_concurrently(self): - """Test if the return_streaming function returns a streaming - response concurrently""" - root_url = self.webhost._addr - streaming_url = f'{root_url}/return_streaming' - - # Function to make a streaming request and validate content - def make_request(): - r = requests.get(streaming_url, timeout=REQUEST_TIMEOUT_SEC, - stream=True) - self.assertTrue(r.ok) - expected_content = [b"First chunk\n", b"Second chunk\n"] - received_content = [] - for chunk in r.iter_content(chunk_size=1024): - if chunk: - received_content.append(chunk) - self.assertEqual(received_content, expected_content) - - # Make concurrent requests - with ThreadPoolExecutor(max_workers=2) as executor: - executor.map(make_request, range(2)) - - @testutils.retryable_test(3, 5) - def test_return_html(self): - """Test if the return_html function returns an HTML response""" - root_url = self.webhost._addr - html_url = f'{root_url}/api/return_html' - r = requests.get(html_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], - 'text/html; charset=utf-8') - # Validate HTML content - expected_html = "

Hello, World!

" - self.assertEqual(r.text, expected_html) - - @testutils.retryable_test(3, 5) - def test_return_ujson(self): - """Test if the return_ujson function returns a UJSON response""" - root_url = self.webhost._addr - ujson_url = f'{root_url}/api/return_ujson' - r = requests.get(ujson_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], 'application/json') - self.assertEqual(r.text, '{"message":"Hello, World!"}') - - @testutils.retryable_test(3, 5) - def test_return_orjson(self): - """Test if the return_orjson function returns an ORJSON response""" - root_url = self.webhost._addr - orjson_url = f'{root_url}/api/return_orjson' - r = requests.get(orjson_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertEqual(r.headers['content-type'], 'application/json') - self.assertEqual(r.text, '{"message":"Hello, World!"}') - - @testutils.retryable_test(3, 5) - def test_return_file(self): - """Test if the return_file function returns a file response""" - root_url = self.webhost._addr - file_url = f'{root_url}/api/return_file' - r = requests.get(file_url, timeout=REQUEST_TIMEOUT_SEC) - self.assertTrue(r.ok) - self.assertIn('@app.route(route="default_template")', r.text) - - @testutils.retryable_test(3, 5) - def test_upload_data_stream(self): - """Test if the upload_data_stream function receives streaming data - and returns the complete data""" - root_url = self.webhost._addr - upload_url = f'{root_url}/api/upload_data_stream' - - # Define the streaming data - data_chunks = [b"First chunk\n", b"Second chunk\n"] - - # Define a function to simulate streaming by reading from an - # iterator - def stream_data(data_chunks): - for chunk in data_chunks: - yield chunk - - # Send a POST request with streaming data - r = requests.post(upload_url, data=stream_data(data_chunks)) - - # Assert that the request was successful - self.assertTrue(r.ok) - - # Assert that the response content matches the concatenation of - # all data 
chunks - complete_data = b"".join(data_chunks) - self.assertEqual(r.content, complete_data) - - @testutils.retryable_test(3, 5) - def test_upload_data_stream_concurrently(self): - """Test if the upload_data_stream function receives streaming data - and returns the complete data""" - root_url = self.webhost._addr - upload_url = f'{root_url}/api/upload_data_stream' - - # Define the streaming data - data_chunks = [b"First chunk\n", b"Second chunk\n"] - - # Define a function to simulate streaming by reading from an - # iterator - def stream_data(data_chunks): - for chunk in data_chunks: - yield chunk - - # Define the number of concurrent requests - num_requests = 5 - - # Define a function to send a single request - def send_request(): - r = requests.post(upload_url, data=stream_data(data_chunks)) - return r.ok, r.content - - # Send multiple requests concurrently - with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [executor.submit(send_request) for _ in - range(num_requests)] - - # Assert that all requests were successful and the response - # contents are correct - for future in concurrent.futures.as_completed(futures): - ok, content = future.result() - self.assertTrue(ok) - complete_data = b"".join(data_chunks) - self.assertEqual(content, complete_data) + def get_environment_variables(cls): + return cls.env_variables class TestUserThreadLoggingHttpFunctions(testutils.WebHostTestCase): diff --git a/tests/endtoend/test_worker_process_count_functions.py b/tests/endtoend/test_worker_process_count_functions.py index 654dc643..e3cd3df6 100644 --- a/tests/endtoend/test_worker_process_count_functions.py +++ b/tests/endtoend/test_worker_process_count_functions.py @@ -22,11 +22,12 @@ def setUpClass(cls): super().setUpClass() - def tearDown(self): + @classmethod + def tearDownClass(cls): os.environ.pop('PYTHON_THREADPOOL_THREAD_COUNT') os.environ.pop('FUNCTIONS_WORKER_PROCESS_COUNT') - super().tearDown() + super().tearDownClass() @classmethod def 
get_script_dir(cls): diff --git a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py index 5db75fd1..1a8062aa 100644 --- a/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py +++ b/tests/extension_tests/deferred_bindings_tests/deferred_bindings_blob_functions/function_app.py @@ -250,13 +250,13 @@ def put_blob_bytes(req: func.HttpRequest, file: func.Out[bytes]) -> str: @app.function_name(name="blob_cache") -@app.blob_input(arg_name="client", +@app.blob_input(arg_name="cachedClient", path="python-worker-tests/test-blobclient-triggered.txt", connection="AzureWebJobsStorage") @app.route(route="blob_cache") def blob_cache(req: func.HttpRequest, - client: blob.BlobClient) -> str: - return client.download_blob(encoding='utf-8').readall() + cachedClient: blob.BlobClient) -> str: + return cachedClient.download_blob(encoding='utf-8').readall() @app.function_name(name="invalid_connection_info") diff --git a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py index eaa1bc72..da60861f 100644 --- a/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py +++ b/tests/extension_tests/deferred_bindings_tests/test_deferred_bindings_blob_functions.py @@ -17,6 +17,10 @@ def get_script_dir(cls): return testutils.EXTENSION_TESTS_FOLDER / 'deferred_bindings_tests' / \ 'deferred_bindings_blob_functions' + @classmethod + def get_libraries_to_install(cls): + return ['azurefunctions-extensions-bindings-blob'] + def test_blob_str(self): r = self.webhost.request('POST', 'put_blob_str', data='test-data') self.assertEqual(r.status_code, 200) diff --git a/tests/endtoend/http_functions/http_functions_v2/fastapi/function_app.py 
b/tests/extension_tests/http_v2_tests/http_functions_v2/fastapi/function_app.py similarity index 100% rename from tests/endtoend/http_functions/http_functions_v2/fastapi/function_app.py rename to tests/extension_tests/http_v2_tests/http_functions_v2/fastapi/function_app.py diff --git a/tests/extension_tests/http_v2_tests/test_http_v2.py b/tests/extension_tests/http_v2_tests/test_http_v2.py new file mode 100644 index 00000000..022ffa97 --- /dev/null +++ b/tests/extension_tests/http_v2_tests/test_http_v2.py @@ -0,0 +1,189 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +import concurrent +import os +import sys +import unittest +from concurrent.futures import ThreadPoolExecutor + +import requests + +from azure_functions_worker.constants import PYTHON_ENABLE_INIT_INDEXING +from tests.utils import testutils + +REQUEST_TIMEOUT_SEC = 5 + + +@unittest.skipIf(sys.version_info.minor < 8, "HTTPv2" + "is only supported for 3.8+.") +class TestHttpFunctionsWithInitIndexing(testutils.WebHostTestCase): + @classmethod + def setUpClass(cls): + cls.env_variables[PYTHON_ENABLE_INIT_INDEXING] = '1' + os.environ[PYTHON_ENABLE_INIT_INDEXING] = "1" + super().setUpClass() + + @classmethod + def tearDownClass(cls): + os.environ.pop(PYTHON_ENABLE_INIT_INDEXING) + super().tearDownClass() + + @classmethod + def get_environment_variables(cls): + return cls.env_variables + + @classmethod + def get_script_dir(cls): + return testutils.EXTENSION_TESTS_FOLDER / 'http_v2_tests' / \ + 'http_functions_v2' / \ + 'fastapi' + + @classmethod + def get_libraries_to_install(cls): + return ['azurefunctions-extensions-http-fastapi', 'orjson', 'ujson'] + + @testutils.retryable_test(3, 5) + def test_return_streaming(self): + """Test if the return_streaming function returns a streaming + response""" + root_url = self.webhost._addr + streaming_url = f'{root_url}/api/return_streaming' + r = requests.get( + streaming_url, timeout=REQUEST_TIMEOUT_SEC, stream=True) + 
self.assertTrue(r.ok) + # Validate streaming content + expected_content = [b'First', b' chun', b'k\nSec', b'ond c', b'hunk\n'] + received_content = [] + for chunk in r.iter_content(chunk_size=5): + if chunk: + received_content.append(chunk) + self.assertEqual(received_content, expected_content) + + @testutils.retryable_test(3, 5) + def test_return_streaming_concurrently(self): + """Test if the return_streaming function returns a streaming + response concurrently""" + root_url = self.webhost._addr + streaming_url = f'{root_url}/return_streaming' + + # Function to make a streaming request and validate content + def make_request(): + r = requests.get(streaming_url, timeout=REQUEST_TIMEOUT_SEC, + stream=True) + self.assertTrue(r.ok) + expected_content = [b"First chunk\n", b"Second chunk\n"] + received_content = [] + for chunk in r.iter_content(chunk_size=1024): + if chunk: + received_content.append(chunk) + self.assertEqual(received_content, expected_content) + + # Make concurrent requests + with ThreadPoolExecutor(max_workers=2) as executor: + executor.map(make_request, range(2)) + + @testutils.retryable_test(3, 5) + def test_return_html(self): + """Test if the return_html function returns an HTML response""" + root_url = self.webhost._addr + html_url = f'{root_url}/api/return_html' + r = requests.get(html_url, timeout=REQUEST_TIMEOUT_SEC) + self.assertTrue(r.ok) + self.assertEqual(r.headers['content-type'], + 'text/html; charset=utf-8') + # Validate HTML content + expected_html = "

Hello, World!

" + self.assertEqual(r.text, expected_html) + + @testutils.retryable_test(3, 5) + def test_return_ujson(self): + """Test if the return_ujson function returns a UJSON response""" + root_url = self.webhost._addr + ujson_url = f'{root_url}/api/return_ujson' + r = requests.get(ujson_url, timeout=REQUEST_TIMEOUT_SEC) + self.assertTrue(r.ok) + self.assertEqual(r.headers['content-type'], 'application/json') + self.assertEqual(r.text, '{"message":"Hello, World!"}') + + @testutils.retryable_test(3, 5) + def test_return_orjson(self): + """Test if the return_orjson function returns an ORJSON response""" + root_url = self.webhost._addr + orjson_url = f'{root_url}/api/return_orjson' + r = requests.get(orjson_url, timeout=REQUEST_TIMEOUT_SEC) + self.assertTrue(r.ok) + self.assertEqual(r.headers['content-type'], 'application/json') + self.assertEqual(r.text, '{"message":"Hello, World!"}') + + @testutils.retryable_test(3, 5) + def test_return_file(self): + """Test if the return_file function returns a file response""" + root_url = self.webhost._addr + file_url = f'{root_url}/api/return_file' + r = requests.get(file_url, timeout=REQUEST_TIMEOUT_SEC) + self.assertTrue(r.ok) + self.assertIn('@app.route(route="default_template")', r.text) + + @testutils.retryable_test(3, 5) + def test_upload_data_stream(self): + """Test if the upload_data_stream function receives streaming data + and returns the complete data""" + root_url = self.webhost._addr + upload_url = f'{root_url}/api/upload_data_stream' + + # Define the streaming data + data_chunks = [b"First chunk\n", b"Second chunk\n"] + + # Define a function to simulate streaming by reading from an + # iterator + def stream_data(data_chunks): + for chunk in data_chunks: + yield chunk + + # Send a POST request with streaming data + r = requests.post(upload_url, data=stream_data(data_chunks)) + + # Assert that the request was successful + self.assertTrue(r.ok) + + # Assert that the response content matches the concatenation of + # all data 
chunks + complete_data = b"".join(data_chunks) + self.assertEqual(r.content, complete_data) + + @testutils.retryable_test(3, 5) + def test_upload_data_stream_concurrently(self): + """Test if the upload_data_stream function receives streaming data + and returns the complete data""" + root_url = self.webhost._addr + upload_url = f'{root_url}/api/upload_data_stream' + + # Define the streaming data + data_chunks = [b"First chunk\n", b"Second chunk\n"] + + # Define a function to simulate streaming by reading from an + # iterator + def stream_data(data_chunks): + for chunk in data_chunks: + yield chunk + + # Define the number of concurrent requests + num_requests = 5 + + # Define a function to send a single request + def send_request(): + r = requests.post(upload_url, data=stream_data(data_chunks)) + return r.ok, r.content + + # Send multiple requests concurrently + with concurrent.futures.ThreadPoolExecutor() as executor: + futures = [executor.submit(send_request) for _ in + range(num_requests)] + + # Assert that all requests were successful and the response + # contents are correct + for future in concurrent.futures.as_completed(futures): + ok, content = future.result() + self.assertTrue(ok) + complete_data = b"".join(data_chunks) + self.assertEqual(content, complete_data) diff --git a/tests/unittests/test_http_functions_v2.py b/tests/unittests/test_http_functions_v2.py index 45428a6b..f0849249 100644 --- a/tests/unittests/test_http_functions_v2.py +++ b/tests/unittests/test_http_functions_v2.py @@ -94,6 +94,7 @@ def check_log_async_logging(self, host_out: typing.List[str]): self.assertIn('hello info', host_out) self.assertIn('and another error', host_out) + @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO") def test_debug_logging(self): r = self.webhost.request('GET', 'debug_logging') self.assertEqual(r.status_code, 200) @@ -105,6 +106,7 @@ def check_log_debug_logging(self, host_out: typing.List[str]): self.assertIn('logging error', host_out) 
self.assertNotIn('logging debug', host_out) + @unittest.skipIf(sys.version_info.minor >= 7, "Skipping for ADO") def test_debug_with_user_logging(self): r = self.webhost.request('GET', 'debug_user_logging') self.assertEqual(r.status_code, 200) diff --git a/tests/unittests/test_loader.py b/tests/unittests/test_loader.py index edc341d1..6b48f692 100644 --- a/tests/unittests/test_loader.py +++ b/tests/unittests/test_loader.py @@ -6,6 +6,7 @@ import subprocess import sys import textwrap +from unittest import skipIf from unittest.mock import Mock, patch from azure.functions import Function @@ -205,6 +206,7 @@ def check_log_loader_module_not_found(self, host_out): class TestPluginLoader(testutils.AsyncTestCase): + @skipIf(sys.version_info.minor <= 7, "Skipping tests <= Python 3.7") async def test_entry_point_plugin(self): test_binding = pathlib.Path(__file__).parent / 'test-binding' subprocess.run([ diff --git a/tests/unittests/test_logging.py b/tests/unittests/test_logging.py index 1a685233..b7c4f5f4 100644 --- a/tests/unittests/test_logging.py +++ b/tests/unittests/test_logging.py @@ -56,5 +56,5 @@ def raising_function(): self.assertIn("call1", processed_exception) self.assertIn("call2", processed_exception) self.assertIn("f", processed_exception) - self.assertIn("tests/unittests/test_logging.py", - processed_exception) + self.assertRegex(processed_exception, + r".*tests\\unittests\\test_logging.py.*") diff --git a/tests/utils/testutils_docker.py b/tests/utils/testutils_docker.py index 2fe69074..484cfd24 100644 --- a/tests/utils/testutils_docker.py +++ b/tests/utils/testutils_docker.py @@ -132,21 +132,23 @@ def create_container(self, image_repo: str, image_url: str, ) function_path = "/home/site/wwwroot" - - if configs.libraries: - install_libraries_cmd = [] - install_libraries_cmd.extend(['pip', 'install']) - install_libraries_cmd.extend(configs.libraries) - install_libraries_cmd.extend(['-t', - f'{script_path}/{_libraries_path}']) - - install_libraries_process = \ - 
subprocess.run(args=install_libraries_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - - if install_libraries_process.returncode != 0: - raise RuntimeError('Failed to install libraries') + configs.libraries = ((configs.libraries or []) + + ['azurefunctions-extensions-base']) + install_libraries_cmd = [] + install_libraries_cmd.extend(['pip', 'install']) + install_libraries_cmd.extend(['--platform=manylinux2014_x86_64']) + install_libraries_cmd.extend(configs.libraries) + install_libraries_cmd.extend(['-t', + f'{script_path}/{_libraries_path}']) + install_libraries_cmd.extend(['--only-binary=:all:']) + + install_libraries_process = \ + subprocess.run(args=install_libraries_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + if install_libraries_process.returncode != 0: + raise RuntimeError('Failed to install libraries') run_cmd = [] run_cmd.extend([_docker_cmd, "run", "-p", "0:80", "-d"]) diff --git a/tests/utils/testutils_lc.py b/tests/utils/testutils_lc.py index 0dfd473f..0f663819 100644 --- a/tests/utils/testutils_lc.py +++ b/tests/utils/testutils_lc.py @@ -27,7 +27,6 @@ _DOCKER_DEFAULT_PATH = "docker" _MESH_IMAGE_URL = "https://mcr.microsoft.com/v2/azure-functions/mesh/tags/list" _MESH_IMAGE_REPO = "mcr.microsoft.com/azure-functions/mesh" -_DUMMY_CONT_KEY = "MDEyMzQ1Njc4OUFCQ0RFRjAxMjM0NTY3ODlBQkNERUY=" _FUNC_GITHUB_ZIP = "https://github.com/Azure/azure-functions-python-library" \ "/archive/refs/heads/dev.zip" _FUNC_FILE_NAME = "azure-functions-python-library-dev" @@ -198,7 +197,8 @@ def spawn_container(self, run_cmd.extend(["--cap-add", "SYS_ADMIN"]) run_cmd.extend(["--device", "/dev/fuse"]) run_cmd.extend(["-e", f"CONTAINER_NAME={self._uuid}"]) - run_cmd.extend(["-e", f"CONTAINER_ENCRYPTION_KEY={_DUMMY_CONT_KEY}"]) + run_cmd.extend(["-e", + f"CONTAINER_ENCRYPTION_KEY={os.getenv('_DUMMY_CONT_KEY')}"]) run_cmd.extend(["-e", "WEBSITE_PLACEHOLDER_MODE=1"]) run_cmd.extend(["-v", f'{worker_path}:{container_worker_path}']) run_cmd.extend(["-v", @@ 
-266,7 +266,7 @@ def _get_site_restricted_token(cls) -> str: which expires in one day. """ exp_ns = int(time.time() + 24 * 60 * 60) * 1000000000 - return cls._encrypt_context(_DUMMY_CONT_KEY, f'exp={exp_ns}') + return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), f'exp={exp_ns}') @classmethod def _get_site_encrypted_context(cls, @@ -281,7 +281,7 @@ def _get_site_encrypted_context(cls, # Ensure WEBSITE_SITE_NAME is set to simulate production mode ctx["Environment"]["WEBSITE_SITE_NAME"] = site_name - return cls._encrypt_context(_DUMMY_CONT_KEY, json.dumps(ctx)) + return cls._encrypt_context(os.getenv('_DUMMY_CONT_KEY'), json.dumps(ctx)) @classmethod def _encrypt_context(cls, encryption_key: str, plain_text: str) -> str: