Merge remote-tracking branch 'upstream/main' into decide_worker/ignore-common-deps
gjoseph92 committed May 5, 2022
2 parents d0f0955 + 7bd6442 commit fd7e790
Showing 284 changed files with 35,418 additions and 16,459 deletions.
25 changes: 18 additions & 7 deletions .coveragerc
@@ -1,19 +1,30 @@
[run]
include =
distributed/*
source =
distributed
omit =
distributed/tests/test*
distributed/hdfs.py
distributed/cluster.py
distributed/*/tests/test*
distributed/compatibility.py
distributed/cli/utils.py
distributed/utils_test.py
distributed/cli/dask_spec.py
distributed/deploy/ssh.py
distributed/_ipython_utils.py
distributed/_version.py
distributed/pytest_resourceleaks.py
distributed/comm/ucx.py

[report]
show_missing = True
exclude_lines =
# re-enable the standard pragma
pragma: nocover
pragma: no cover
# exclude nvml calls
[\s(.]nvml[\s(.]
[\s(.]pynvml[\s(.]
# exclude LOG_PDB
LOG_PDB
# always ignore type checking blocks
TYPE_CHECKING
except ImportError

[html]
directory = coverage_html_report
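
For orientation, here is a minimal local sketch of how the updated coverage configuration gets exercised; the pytest-cov flags mirror the CI invocation added to tests.yaml later in this commit, and the final step writes into the coverage_html_report directory configured in the [html] section above. The local commands themselves are an assumption, not part of this diff.

# Hedged local sketch, not part of this commit: run the suite with coverage
# enabled the same way the updated tests.yaml does, then render the HTML report.
python -m pip install coverage pytest-cov

# Mirrors the CI flags added in tests.yaml (--cov=distributed --cov-report=xml)
python -m pytest distributed --cov=distributed --cov-report=xml

# Uses the [html] section above: output lands in coverage_html_report/
python -m coverage html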
5 changes: 3 additions & 2 deletions .github/PULL_REQUEST_TEMPLATE.md
@@ -1,3 +1,4 @@
- [ ] Closes #xxxx
Closes #xxxx

- [ ] Tests added / passed
- [ ] Passes `black distributed` / `flake8 distributed` / `isort distributed`
- [ ] Passes `pre-commit run --all-files`
77 changes: 77 additions & 0 deletions .github/workflows/conda.yml
@@ -0,0 +1,77 @@
name: Conda build
on:
push:
branches:
- main
pull_request:
paths:
- setup.py
- requirements.txt
- continuous_integration/recipes/**
- .github/workflows/conda.yml

# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
concurrency:
group: conda-${{ github.head_ref }}
cancel-in-progress: true

# Required shell entrypoint to have properly activated conda environments
defaults:
run:
shell: bash -l {0}

jobs:
conda:
name: Build (and upload)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up Python
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
use-mamba: true
python-version: 3.8
- name: Install dependencies
run: |
mamba install boa conda-verify
which python
pip list
mamba list
- name: Build conda packages
run: |
# suffix for pre-release package versions
export VERSION_SUFFIX=a`date +%y%m%d`
# conda search for the latest dask-core pre-release
arr=($(conda search --override-channels -c dask/label/dev dask-core | tail -n 1))
# extract dask-core pre-release version / build
export DASK_CORE_VERSION=${arr[1]}
# distributed pre-release build
conda mambabuild continuous_integration/recipes/distributed \
--channel dask/label/dev \
--no-anaconda-upload \
--output-folder .
# dask pre-release build
conda mambabuild continuous_integration/recipes/dask \
--channel dask/label/dev \
--no-anaconda-upload \
--output-folder .
- name: Upload conda packages
if: |
github.event_name == 'push'
&& github.ref == 'refs/heads/main'
&& github.repository == 'dask/distributed'
env:
ANACONDA_API_TOKEN: ${{ secrets.DASK_CONDA_TOKEN }}
run: |
# install anaconda for upload
mamba install anaconda-client
anaconda upload --label dev noarch/*.tar.bz2
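
A side note on the version-detection step above: `conda search` prints a table whose last row is the newest matching package, and word-splitting that row puts the version in the second field. A hedged, standalone illustration using a made-up row (the real output of `conda search -c dask/label/dev dask-core` has the same name/version/build/channel shape):

# Hypothetical conda search row; values are invented for illustration.
row="dask-core  2022.5.0a220504  py_0  dask/label/dev"

# Same parsing as the workflow step above: word-split into an array,
# then take the second field as the pre-release version.
arr=($row)
echo "${arr[0]}"   # dask-core
echo "${arr[1]}"   # 2022.5.0a220504 -> exported as DASK_CORE_VERSION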
45 changes: 45 additions & 0 deletions .github/workflows/publish-test-results.yaml
@@ -0,0 +1,45 @@
# Copied from https://github.com/EnricoMi/publish-unit-test-result-action/blob/v1.23/README.md#support-fork-repositories-and-dependabot-branches
# Warning: changes to this workflow will NOT be picked up until they land in the main branch!
# See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run

name: Publish test results

on:
workflow_run:
workflows: [Tests]
types: [completed]

jobs:
publish-test-results:
name: Publish test results
runs-on: ubuntu-latest
if: github.event.workflow_run.conclusion != 'skipped'

# Needed to post comments on the PR
permissions:
checks: write
pull-requests: write

steps:
- name: Download and extract artifacts
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
run: |
mkdir artifacts && cd artifacts
artifacts_url=${{ github.event.workflow_run.artifacts_url }}
gh api "$artifacts_url" -q '.artifacts[] | [.name, .archive_download_url] | @tsv' | while read artifact
do
IFS=$'\t' read name url <<< "$artifact"
gh api $url > "$name.zip"
unzip -d "$name" "$name.zip"
done
- name: Publish Unit Test Results
uses: EnricoMi/publish-unit-test-result-action@v1
with:
commit: ${{ github.event.workflow_run.head_sha }}
event_file: artifacts/Event File/event.json
event_name: ${{ github.event.workflow_run.event }}
files: artifacts/**/*.xml
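
A small aside on the artifact loop above: the `gh api ... -q` query emits one tab-separated "name<TAB>url" row per artifact, and `IFS=$'\t' read` splits each row back into the two fields. A minimal, self-contained sketch with a hypothetical row:

# Hypothetical TSV row of the kind the gh api / jq query above emits.
artifact=$'Event File\thttps://api.github.com/repos/dask/distributed/actions/artifacts/1/zip'

# Same splitting as in the workflow: tab-separated fields into name and url.
IFS=$'\t' read name url <<< "$artifact"
echo "$name"   # Event File
echo "$url"    # https://api.github.com/...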
@@ -1,7 +1,6 @@
name: Debug passwordless `ssh localhost`

on: []
# on: [pull_request] # Uncomment to enable
on: [push, pull_request]

jobs:
test:
51 changes: 51 additions & 0 deletions .github/workflows/test-report.yaml
@@ -0,0 +1,51 @@
name: Test Report

on:
schedule:
# Run 2h after the daily tests.yaml
- cron: "0 8,20 * * *"
workflow_dispatch:

jobs:
test-report:
name: Test Report
# Do not run the report job on forks
if: github.repository == 'dask/distributed' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ github.token }}
steps:
- uses: actions/checkout@v2

- name: Setup Conda Environment
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
miniforge-version: latest
condarc-file: continuous_integration/condarc
use-mamba: true
python-version: 3.9
environment-file: continuous_integration/scripts/test-report-environment.yml
activate-environment: dask-distributed

- name: Show conda options
shell: bash -l {0}
run: conda config --show

- name: mamba list
shell: bash -l {0}
run: mamba list

- name: Generate report
shell: bash -l {0}
run: |
python continuous_integration/scripts/test_report.py --days 90 --nfails 1 -o test_report.html
python continuous_integration/scripts/test_report.py --days 7 --nfails 2 -o test_short_report.html
mkdir deploy
mv test_report.html test_short_report.html deploy/
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@4.1.7
with:
branch: gh-pages
folder: deploy
96 changes: 71 additions & 25 deletions .github/workflows/tests.yaml
@@ -1,6 +1,10 @@
name: Tests

on: [push, pull_request]
on:
push:
pull_request:
schedule:
- cron: "0 6,18 * * *"

# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
@@ -10,16 +14,26 @@ concurrency:

jobs:
test:
# Do not run the schedule job on forks
if: github.repository == 'dask/distributed' || github.event_name != 'schedule'
runs-on: ${{ matrix.os }}
timeout-minutes: 180
timeout-minutes: 120

strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ["3.7", "3.8", "3.9"]
python-version: ["3.8", "3.9", "3.10"]
# Cherry-pick test modules to split the overall runtime roughly in half
partition: [ci1, not ci1]
exclude:
- os: macos-latest
python-version: 3.9
include:
- partition: "ci1"
partition-label: "ci1"
- partition: "not ci1"
partition-label: "notci1"

# Uncomment to stress-test the test suite for random failures.
# Must also change env.TEST_ID below.
@@ -30,8 +44,8 @@ jobs:
# run: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]

env:
TEST_ID: ${{ matrix.os }}-${{ matrix.python-version }}
# TEST_ID: ${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.run }}
TEST_ID: ${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.partition-label }}
# TEST_ID: ${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.partition-label }}-${{ matrix.run }}

steps:
- name: Checkout source
@@ -54,12 +68,6 @@ jobs:
shell: bash -l {0}
run: conda config --show

- name: Install stacktrace
shell: bash -l {0}
# stacktrace for Python 3.8 has not been released at the moment of writing
if: ${{ matrix.os == 'ubuntu-latest' && matrix.python-version < '3.8' }}
run: mamba install -c conda-forge -c defaults -c numba libunwind stacktrace

- name: Hack around https://github.com/ipython/ipython/issues/12197
# This upstream issue causes an interpreter crash when running
# distributed/protocol/tests/test_serialize.py::test_profile_nested_sizeof
@@ -70,14 +78,7 @@ jobs:
- name: Install
shell: bash -l {0}
run: |
# Cythonize scheduler on Python 3.7 builds
if [[ "${{ matrix.python-version }}" = "3.7" ]]; then
python -m pip install -vv --no-deps --install-option="--with-cython" -e .
python -c "from distributed.scheduler import COMPILED; assert COMPILED"
else
python -m pip install --no-deps -e .
python -c "from distributed.scheduler import COMPILED; assert not COMPILED"
fi
python -m pip install --no-deps -e .
- name: mamba list
shell: bash -l {0}
@@ -98,8 +99,8 @@ jobs:

- name: Reconfigure pytest-timeout
shell: bash -l {0}
# No SIGALRM available on Windows. On MacOS, it kills the whole test suite.
if: ${{ matrix.os == 'ubuntu-latest' }}
# No SIGALRM available on Windows
if: ${{ matrix.os != 'windows-latest' }}
run: sed -i.bak 's/timeout_method = thread/timeout_method = signal/' setup.cfg

- name: Test
@@ -113,16 +114,38 @@ jobs:
# https://github.com/dask/distributed/issues/4514
export DISABLE_IPV6=1
fi
source continuous_integration/scripts/set_ulimit.sh
pytest distributed -m "not avoid_ci and ${{ matrix.partition }}" --runslow \
--junitxml reports/pytest.xml -o junit_suite_name=$TEST_ID
source continuous_integration/scripts/set_ulimit.sh
set -o pipefail
mkdir reports
pytest distributed \
-m "not avoid_ci and ${{ matrix.partition }}" --runslow \
--leaks=fds,processes,threads \
--junitxml reports/pytest.xml -o junit_suite_name=$TEST_ID \
--cov=distributed --cov-report=xml \
| tee reports/stdout
- name: Generate junit XML report in case of pytest-timeout
if: ${{ failure() }}
shell: bash -l {0}
run: |
if [ ! -e reports/pytest.xml ]
then
# This should only ever happen on Windows.
# On Linux and MacOS, pytest-timeout kills off the individual tests
# See (reconfigure pytest-timeout above)
python continuous_integration/scripts/parse_stdout.py < reports/stdout > reports/pytest.xml
fi
# - name: Debug with tmate on failure
# if: ${{ failure() }}
# uses: mxschmitt/action-tmate@v3

- name: Upload test artifacts
- name: Coverage
uses: codecov/codecov-action@v1

- name: Upload test results
# ensure this runs even if pytest fails
if: >
always() &&
@@ -131,3 +154,26 @@ jobs:
with:
name: ${{ env.TEST_ID }}
path: reports
- name: Upload gen_cluster dumps for failed tests
# ensure this runs even if pytest fails
if: >
always() &&
(steps.run_tests.outcome == 'success' || steps.run_tests.outcome == 'failure')
uses: actions/upload-artifact@v2
with:
name: ${{ env.TEST_ID }}_cluster_dumps
path: test_cluster_dump
if-no-files-found: ignore

# Publish an artifact for the event; used by publish-test-results.yaml
event_file:
# Do not run the schedule job on forks
if: github.repository == 'dask/distributed' || github.event_name != 'schedule'
name: "Event File"
runs-on: ubuntu-latest
steps:
- name: Upload
uses: actions/upload-artifact@v2
with:
name: Event File
path: ${{ github.event_path }}
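
For reference, the partition matrix near the top of this workflow boils down to two pytest marker expressions. A hedged sketch of the equivalent plain invocations (not a literal excerpt of the workflow):

# Hedged sketch of the two matrix partitions as direct pytest runs.
pytest distributed -m "not avoid_ci and ci1" --runslow       # partition "ci1"     -> TEST_ID suffix "ci1"
pytest distributed -m "not avoid_ci and not ci1" --runslow   # partition "not ci1" -> TEST_ID suffix "notci1"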