fix a bug in the tile_processing procedure; change default b_c2 to True #173
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
# Based on ~/code/xcookie/xcookie/rc/tests.yml.in
# Now based on ~/code/xcookie/xcookie/builders/github_actions.py
# See: https://github.com/Erotemic/xcookie
name: BinPyCI
on:
  push:
  pull_request:
    branches: [ devel, main ]
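  # Note: the "branches" filter above applies only to the pull_request
  # trigger; plain pushes to any branch will also run this workflow.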
jobs:
  lint_job:
    ##
    # Run quick linting and typing checks.
    # To disable all linting add "linter=false" to the xcookie config.
    # To disable type checks add "notypes" to the xcookie tags.
    ##
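    # A rough sketch of where those knobs live in pyproject.toml (the key
    # names below are illustrative assumptions, not copied from this repo):
    #
    #   [tool.xcookie]
    #   tags = [ "binpy", "github" ]
    #   linter = false    # would disable this lint_job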
    runs-on: ubuntu-latest
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - name: Set up Python 3.11 for linting
      uses: actions/setup-python@v4.6.1
      with:
        python-version: '3.11'
    - name: Install dependencies
      run: |-
        python -m pip install --upgrade pip
        python -m pip install flake8
    - name: Lint with flake8
      run: |-
        # stop the build if there are Python syntax errors or undefined names
        flake8 ./src/python/pycold --count --select=E9,F63,F7,F82 --show-source --statistics
    - name: Typecheck with mypy
      run: |-
        python -m pip install mypy
        mypy --install-types --non-interactive ./src/python/pycold
        mypy ./src/python/pycold
  build_and_test_sdist:
    ##
    # Build the binary package from source and test it in the same
    # environment.
    ##
    name: Build sdist
    runs-on: ubuntu-latest
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - name: Set up Python 3.11
      uses: actions/setup-python@v4.6.1
      with:
        python-version: '3.11'
    - name: Upgrade pip
      run: |-
        python -m pip install --upgrade pip
        python -m pip install --prefer-binary -r requirements/tests.txt
        python -m pip install --prefer-binary -r requirements/runtime.txt
        python -m pip install --prefer-binary -r requirements/headless.txt
        python -m pip install --prefer-binary -r requirements/gdal.txt
    - name: Build sdist
      shell: bash
      run: |-
        python -m pip install pip -U
        python -m pip install "setuptools>=0.8" wheel build
        python -m build --sdist --outdir wheelhouse
    - name: Install sdist
      run: |-
        ls -al ./wheelhouse
        pip install --prefer-binary wheelhouse/pycold*.tar.gz -v
    - name: Test minimal loose sdist
      run: |-
        pwd
        ls -al
        # Run in a sandboxed directory
        WORKSPACE_DNAME="testsrcdir_minimal_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}"
        mkdir -p $WORKSPACE_DNAME
        cd $WORKSPACE_DNAME
        # Run the tests
        # Get path to installed package
        MOD_DPATH=$(python -c "import pycold, os; print(os.path.dirname(pycold.__file__))")
        echo "MOD_DPATH = $MOD_DPATH"
        python -m pytest --verbose --cov=pycold $MOD_DPATH ../tests
        cd ..
    - name: Test full loose sdist
      run: |-
        pwd
        ls -al
        python -m pip install --prefer-binary -r requirements/headless.txt
        # Run in a sandboxed directory
        WORKSPACE_DNAME="testsrcdir_full_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}"
        mkdir -p $WORKSPACE_DNAME
        cd $WORKSPACE_DNAME
        # Run the tests
        # Get path to installed package
        MOD_DPATH=$(python -c "import pycold, os; print(os.path.dirname(pycold.__file__))")
        echo "MOD_DPATH = $MOD_DPATH"
        python -m pytest --verbose --cov=pycold $MOD_DPATH ../tests
        cd ..
    - name: Upload sdist artifact
      uses: actions/upload-artifact@v3
      with:
        name: wheels
        path: ./wheelhouse/*.tar.gz
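  # Both the sdist job above and the wheel-building job below upload into the
  # same "wheels" artifact, so the test and deploy jobs can download every
  # distribution file from a single artifact name.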
  build_binpy_wheels:
    ##
    # Build the binary wheels. Note: even though cibuildwheel will test
    # them internally here, we will test them independently later in the
    # test_binpy_wheels step.
    ##
    name: ${{ matrix.os }}, arch=${{ matrix.arch }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        # Normally, xcookie generates explicit lists of platforms to build and
        # test on, but in this case cibuildwheel does that for us, so we just
        # need to set the environment variables for cibuildwheel. These are
        # parsed out of the standard [tool.cibuildwheel] section in
        # pyproject.toml and set explicitly here.
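        # For reference, the mirrored pyproject.toml section looks roughly like
        # the sketch below; only the skip pattern is copied from this file, the
        # other keys are illustrative assumptions:
        #
        #   [tool.cibuildwheel]
        #   build = "cp3*-*"
        #   skip = "*-win32 *-musllinux_* *i686 pp*"
        #   build-frontend = "build"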
        os:
        - ubuntu-latest
        cibw_skip:
        - '*-win32 *-musllinux_* *i686 pp*'
        arch:
        - auto
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - name: Set up QEMU
      uses: docker/setup-qemu-action@v2
      if: runner.os == 'Linux' && matrix.arch != 'auto'
      with:
        platforms: all
    - name: Build binary wheels
      uses: pypa/cibuildwheel@v2.13.1
      with:
        output-dir: wheelhouse
        config-file: pyproject.toml
      env:
        CIBW_SKIP: ${{ matrix.cibw_skip }}
        CIBW_ARCHS_LINUX: ${{ matrix.arch }}
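    # cibuildwheel gives environment variables precedence over its config
    # file, so the CIBW_SKIP / CIBW_ARCHS_LINUX values above win over any
    # [tool.cibuildwheel] settings. A rough local reproduction of this step
    # (assuming cibuildwheel and docker are installed) would be:
    #
    #   CIBW_SKIP='*-win32 *-musllinux_* *i686 pp*' CIBW_ARCHS_LINUX=auto \
    #       python -m cibuildwheel --output-dir wheelhouse --config-file pyproject.toml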
    - name: Show built files
      shell: bash
      run: ls -la wheelhouse
    - name: Set up Python 3.11 to combine coverage
      uses: actions/setup-python@v4.6.1
      if: runner.os == 'Linux'
      with:
        python-version: '3.11'
    - name: Combine coverage Linux
      if: runner.os == 'Linux'
      run: |-
        echo '############ PWD'
        pwd
        cp wheelhouse/.coverage* . || true
        ls -al
        python -m pip install coverage[toml]
        echo '############ combine'
        coverage combine . || true
        echo '############ XML'
        coverage xml -o ./coverage.xml || true
        echo '### The cwd should now have a coverage.xml'
        ls -altr
        pwd
    - uses: codecov/codecov-action@v3
      name: Codecov Upload
      with:
        file: ./coverage.xml
    - uses: actions/upload-artifact@v3
      name: Upload wheels artifact
      with:
        name: wheels
        path: ./wheelhouse/pycold*.whl
  test_binpy_wheels:
    ##
    # Download the previously built binary wheels from the
    # build_binpy_wheels step, and test them in an independent
    # environment.
    ##
    name: ${{ matrix.python-version }} on ${{ matrix.os }}, arch=${{ matrix.arch }} with ${{ matrix.install-extras }}
    runs-on: ${{ matrix.os }}
    needs:
    - build_binpy_wheels
    strategy:
      matrix:
        # Xcookie generates an explicit list of environments that will be used
        # for testing instead of using the more concise matrix notation.
        include:
        - python-version: '3.8'
          install-extras: tests-strict,runtime-strict,headless-strict
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal-strict.txt
        - python-version: '3.11'
          install-extras: tests-strict,runtime-strict,optional-strict,headless-strict
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal-strict.txt
        - python-version: '3.8'
          install-extras: tests,optional,headless
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal.txt
        - python-version: '3.9'
          install-extras: tests,optional,headless
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal.txt
        - python-version: '3.10'
          install-extras: tests,optional,headless
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal.txt
        - python-version: '3.11'
          install-extras: tests,optional,headless
          os: ubuntu-latest
          arch: auto
          gdal-requirement-txt: requirements/gdal.txt
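        # The "*-strict" rows follow the xcookie convention of pinning exact
        # requirement versions (exercised here on the oldest and newest
        # supported Pythons), while the unpinned extras test the loose
        # requirement ranges on every interpreter in between.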
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - name: Set up QEMU
      uses: docker/setup-qemu-action@v2
      if: runner.os == 'Linux' && matrix.arch != 'auto'
      with:
        platforms: all
    - name: Setup Python
      uses: actions/setup-python@v4.6.1
      with:
        python-version: ${{ matrix.python-version }}
    - uses: actions/download-artifact@v3
      name: Download wheels
      with:
        name: wheels
        path: wheelhouse
    - name: Install wheel ${{ matrix.install-extras }}
      shell: bash
      env:
        INSTALL_EXTRAS: ${{ matrix.install-extras }}
        GDAL_REQUIREMENT_TXT: ${{ matrix.gdal-requirement-txt }}
      run: |-
echo "Finding the path to the wheel" | |
ls wheelhouse || echo "wheelhouse does not exist" | |
echo "Installing helpers" | |
pip install setuptools>=0.8 setuptools_scm wheel build -U | |
pip install tomli pkginfo | |
export WHEEL_FPATH=$(python -c "import pathlib; print(str(sorted(pathlib.Path('wheelhouse').glob('pycold*.whl'))[-1]).replace(chr(92), chr(47)))") | |
export MOD_VERSION=$(python -c "from pkginfo import Wheel; print(Wheel('$WHEEL_FPATH').version)") | |
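        # The two exports above locate the newest pycold wheel in ./wheelhouse
        # (chr(92)/chr(47) swap backslashes for forward slashes so the glob
        # result is usable as-is) and read its version from the wheel metadata,
        # so that exact version can be requested from the local wheelhouse below.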
        pip install -r "$GDAL_REQUIREMENT_TXT"
        pip install --prefer-binary "pycold[$INSTALL_EXTRAS]==$MOD_VERSION" -f wheelhouse
        echo "Install finished."
    - name: Test wheel ${{ matrix.install-extras }}
      shell: bash
      env:
        CI_PYTHON_VERSION: py${{ matrix.python-version }}
      run: |-
        echo "Creating test sandbox directory"
        export WORKSPACE_DNAME="testdir_${CI_PYTHON_VERSION}_${GITHUB_RUN_ID}_${RUNNER_OS}"
        echo "WORKSPACE_DNAME=$WORKSPACE_DNAME"
        mkdir -p $WORKSPACE_DNAME
        echo "cd-ing into the workspace"
        cd $WORKSPACE_DNAME
        pwd
        ls -altr
        # Get the path to the installed package and run the tests
        export MOD_DPATH=$(python -c "import pycold, os; print(os.path.dirname(pycold.__file__))")
        echo "
        ---
        MOD_DPATH = $MOD_DPATH
        ---
        running the pytest command inside the workspace
        ---
        "
        python -m pytest --verbose -p pytester -p no:doctest --xdoctest --cov-config ../pyproject.toml --cov-report term --cov="pycold" "$MOD_DPATH" ../tests
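        # About the flags above: "-p pytester -p no:doctest --xdoctest" swaps
        # pytest's builtin doctest plugin for xdoctest, and "--cov-config
        # ../pyproject.toml" points coverage at the repo-root config so the
        # per-job .coverage files can be combined in the next step.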
echo "pytest command finished, moving the coverage file to the repo root" | |
ls -al | |
# Move coverage file to a new name | |
mv .coverage "../.coverage.$WORKSPACE_DNAME" | |
echo "changing directory back to th repo root" | |
cd .. | |
ls -al | |
    - name: Combine coverage Linux
      if: runner.os == 'Linux'
      run: |-
        echo '############ PWD'
        pwd
        cp wheelhouse/.coverage* . || true
        ls -al
        python -m pip install coverage[toml]
        echo '############ combine'
        coverage combine . || true
        echo '############ XML'
        coverage xml -o ./coverage.xml || true
        echo '### The cwd should now have a coverage.xml'
        ls -altr
        pwd
    - uses: codecov/codecov-action@v3
      name: Codecov Upload
      with:
        file: ./coverage.xml
  test_deploy:
    name: Uploading Test to PyPi
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && ! startsWith(github.event.ref, 'refs/tags') && ! startsWith(github.event.ref, 'refs/heads/release')
    needs:
    - build_and_test_sdist
    - build_binpy_wheels
    - test_binpy_wheels
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - uses: actions/download-artifact@v3
      name: Download wheels and sdist
      with:
        name: wheels
        path: wheelhouse
    - name: Show files to upload
      shell: bash
      run: ls -la wheelhouse
    - name: Publish
      env:
        TWINE_REPOSITORY_URL: https://test.pypi.org/legacy/
        TWINE_USERNAME: __user__
        TWINE_PASSWORD: ${{ secrets.PYPY_TEST }}
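      # The twine command below passes "--username __token__" explicitly, which
      # overrides TWINE_USERNAME above; the TWINE_PASSWORD secret is therefore
      # expected to hold a (test) PyPI API token.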
      run: |-
        pip install urllib3 requests[security] twine -U
        twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/* --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; }
  live_deploy:
    name: Uploading Live to PyPi
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && (startsWith(github.event.ref, 'refs/tags') || startsWith(github.event.ref, 'refs/heads/release'))
    needs:
    - build_and_test_sdist
    - build_binpy_wheels
    - test_binpy_wheels
    steps:
    - name: Checkout source
      uses: actions/checkout@v3
    - uses: actions/download-artifact@v3
      name: Download wheels and sdist
      with:
        name: wheels
        path: wheelhouse
    - name: Show files to upload
      shell: bash
      run: ls -la wheelhouse
    - name: Publish
      env:
        TWINE_REPOSITORY_URL: https://upload.pypi.org/legacy/
        TWINE_USERNAME: __user__
        TWINE_PASSWORD: ${{ secrets.PYPI }}
      run: |-
        pip install urllib3 requests[security] twine -U
        twine upload --username __token__ --password "$TWINE_PASSWORD" --repository-url "$TWINE_REPOSITORY_URL" wheelhouse/* --skip-existing --verbose || { echo "failed to twine upload" ; exit 1; }