diff --git a/.azure-pipelines.yml b/.azure-pipelines.yml deleted file mode 100644 index c31749e1c..000000000 --- a/.azure-pipelines.yml +++ /dev/null @@ -1,268 +0,0 @@ -# Configuration for Azure Pipelines -######################################################################################## - -# Only build the master branch, tags, and PRs (on by default) to avoid building random -# branches in the repository until a PR is opened. -trigger: - branches: - include: - - master - - refs/tags/* - -# Make sure triggers are set for PRs to any branch. -pr: - branches: - include: - - '*' - - -jobs: - -# Linux -######################################################################################## -- job: - displayName: 'Style Checks' - - pool: - vmImage: 'ubuntu-16.04' - - variables: - CONDA_INSTALL_EXTRA: "black>=20.8b1 flake8 pylint==2.4.*" - PYTHON: '3.7' - - steps: - - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.7' - - - bash: echo "##vso[task.prependpath]/usr/share/miniconda/bin" - displayName: Add conda to PATH - - # Get the Fatiando CI scripts - - bash: git clone --branch=1.2.0 --depth=1 https://github.com/fatiando/continuous-integration.git - displayName: Fetch the Fatiando CI scripts - - # Setup dependencies and build a conda environment - - bash: source continuous-integration/azure/setup-miniconda.sh - displayName: Setup Miniconda - - # Show installed pkg information for postmortem diagnostic - - bash: | - set -x -e - source activate testing - conda list - displayName: List installed packages - - # Check that the code passes format checks - - bash: | - set -x -e - source activate testing - make check - displayName: Formatting check (black and flake8) - condition: succeededOrFailed() - - # Check that the code passes linting checks - - bash: | - set -x -e - source activate testing - make lint - displayName: Linting (pylint) - condition: succeededOrFailed() - - -# Mac -######################################################################################## -- job: - displayName: 'Mac' - - pool: - vmImage: 'macOS-10.15' - - variables: - CONDA_REQUIREMENTS: requirements.txt - CONDA_REQUIREMENTS_DEV: requirements-dev.txt - CONDA_INSTALL_EXTRA: "codecov" - VERDE_DATA_DIR: "$(Agent.TempDirectory)/.verde/data" - - strategy: - matrix: - Python38: - python.version: '3.8' - PYTHON: '3.8' - Python37: - python.version: '3.7' - PYTHON: '3.7' - Python36: - python.version: '3.6' - PYTHON: '3.6' - Python37-Optional: - python.version: '3.7' - PYTHON: '3.7' - CONDA_INSTALL_EXTRA: "codecov pykdtree numba" - DASK_SCHEDULER: "synchronous" - Python36-Optional: - python.version: '3.6' - PYTHON: '3.6' - CONDA_INSTALL_EXTRA: "codecov pykdtree numba" - DASK_SCHEDULER: "synchronous" - - steps: - - - bash: echo "##vso[task.prependpath]$CONDA/bin" - displayName: Add conda to PATH - - # On Hosted macOS, the agent user doesn't have ownership of Miniconda's installation - # directory We need to take ownership if we want to update conda or install packages - # globally - - bash: sudo chown -R $USER $CONDA - displayName: Take ownership of conda installation - - # Get the Fatiando CI scripts - - bash: git clone --branch=1.2.0 --depth=1 https://github.com/fatiando/continuous-integration.git - displayName: Fetch the Fatiando CI scripts - - # Setup dependencies and build a conda environment - - bash: source continuous-integration/azure/setup-miniconda.sh - displayName: Setup Miniconda - - # Show installed pkg information for postmortem diagnostic - - bash: | - set -x -e - source activate testing - conda list - 
displayName: List installed packages - - # Copy the test data to the cache folder - - bash: | - set -x -e - mkdir -p ${VERDE_DATA_DIR}/master - cp -r data/* ${VERDE_DATA_DIR}/master - displayName: Copy test data to cache - - # Install the package - - bash: | - set -x -e - source activate testing - python setup.py bdist_wheel - pip install dist/* - displayName: Install the package - - # Run the tests - - bash: | - set -x -e - source activate testing - ls ${VERDE_DATA_DIR}/master - make test - displayName: Test - - # Build the documentation - - bash: | - set -x -e - source activate testing - make -C doc clean all - displayName: Build the documentation - - # Upload test coverage if there were no failures - - bash: | - set -x -e - source activate testing - coverage xml - echo "Uploading coverage to Codecov" - codecov -e PYTHON AGENT_OS - env: - CODECOV_TOKEN: $(codecov.token) - condition: succeeded() - displayName: Upload coverage - - -# Windows -######################################################################################## -- job: - displayName: 'Windows' - - pool: - vmImage: 'vs2017-win2016' - - variables: - CONDA_REQUIREMENTS: requirements.txt - CONDA_REQUIREMENTS_DEV: requirements-dev.txt - CONDA_INSTALL_EXTRA: "codecov" - VERDE_DATA_DIR: "$(Agent.TempDirectory)/.verde/data" - - strategy: - matrix: - Python38: - python.version: '3.8' - PYTHON: '3.8' - Python37: - python.version: '3.7' - PYTHON: '3.7' - Python36: - python.version: '3.6' - PYTHON: '3.6' - Python37-Optional: - python.version: '3.7' - PYTHON: '3.7' - CONDA_INSTALL_EXTRA: "codecov pykdtree numba" - DASK_SCHEDULER: "synchronous" - Python36-Optional: - python.version: '3.6' - PYTHON: '3.6' - CONDA_INSTALL_EXTRA: "codecov pykdtree numba" - DASK_SCHEDULER: "synchronous" - - steps: - - - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts" - displayName: Add conda to PATH - - # Get the Fatiando CI scripts - - script: git clone --branch=1.2.0 --depth=1 https://github.com/fatiando/continuous-integration.git - displayName: Fetch the Fatiando CI scripts - - # Setup dependencies and build a conda environment - - script: continuous-integration/azure/setup-miniconda.bat - displayName: Setup Miniconda - - # Show installed pkg information for postmortem diagnostic - - bash: | - set -x -e - source activate testing - conda list - displayName: List installed packages - - # Copy the test data to the cache folder - - bash: | - set -x -e - mkdir -p ${VERDE_DATA_DIR}/master - cp -r data/* ${VERDE_DATA_DIR}/master - displayName: Copy test data to cache - - # Install the package that we want to test - - bash: | - set -x -e - source activate testing - python setup.py sdist --formats=zip - pip install dist/* - displayName: Install the package - - # Run the tests - - bash: | - set -x -e - source activate testing - ls ${VERDE_DATA_DIR}/master - make test - displayName: Test - - # Upload test coverage if there were no failures - - bash: | - set -x -e - source activate testing - coverage report -m - coverage xml - codecov -e PYTHON AGENT_OS - env: - CODECOV_TOKEN: $(codecov.token) - condition: succeeded() - displayName: Upload coverage diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 000000000..fcebcd369 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,213 @@ +# Build the documentation and deploy to GitHub Pages using GitHub Actions. +# +# NOTE: Pin actions to a specific commit to avoid having the authentication +# token stolen if the Action is compromised. 
See the comments and links here:
+# https://github.com/pypa/gh-action-pypi-publish/issues/27
+#
+name: docs
+
+# Only build PRs, the master branch, and releases. Pushes to branches will only
+# be built when a PR is opened. This avoids duplicated builds in PRs coming
+# from branches in the origin repository (1 for PR and 1 for push).
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+  release:
+    types:
+      - published
+
+# Use bash by default in all jobs
+defaults:
+  run:
+    # The -l {0} is necessary for conda environments to be activated
+    # But this breaks on macOS if using actions/setup-python:
+    # https://github.com/actions/setup-python/issues/132
+    shell: bash -l {0}
+
+jobs:
+  #############################################################################
+  # Build the docs
+  build:
+    runs-on: ubuntu-latest
+    env:
+      CONDA_REQUIREMENTS: requirements.txt
+      CONDA_REQUIREMENTS_DEV: requirements-dev.txt
+      CONDA_REQUIREMENTS_OPTIONAL: requirements-optional.txt
+      CONDA_INSTALL_EXTRA:
+      PYTHON: 3.8
+
+    steps:
+      # Cancel any previous run of the test job
+      # We pin the commit hash corresponding to v0.5.0 instead of the tag
+      # because we are giving full access through the github.token.
+      - name: Cancel Previous Runs
+        uses: styfle/cancel-workflow-action@148d9a848c6acaf90a3ec30bc5062f646f8a4163
+        with:
+          access_token: ${{ github.token }}
+
+      # Check out your repository under $GITHUB_WORKSPACE
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # Need to fetch more than the last commit so that setuptools-scm can
+          # create the correct version string. If the number of commits since
+          # the last release is greater than this, the version will still be
+          # wrong. Increase if necessary.
+          fetch-depth: 100
+          # The GitHub token is preserved by default but this job doesn't need
+          # to be able to push to GitHub.
+          persist-credentials: false
+
+      # Need the tags so that setuptools-scm can form a valid version number
+      - name: Fetch git tags
+        run: git fetch origin 'refs/tags/*:refs/tags/*'
+
+      - name: Setup caching for conda packages
+        uses: actions/cache@v2
+        with:
+          path: ~/conda_pkgs_dir
+          key: conda-${{ runner.os }}-${{ env.PYTHON }}-${{ hashFiles('requirements*.txt') }}
+
+      - name: Setup Miniconda
+        uses: conda-incubator/setup-miniconda@v2.0.1
+        with:
+          python-version: ${{ env.PYTHON }}
+          miniconda-version: "latest"
+          auto-update-conda: true
+          channels: conda-forge
+          show-channel-urls: true
+          activate-environment: testing
+          # Needed for caching
+          use-only-tar-bz2: true
+
+      - name: Install requirements
+        run: |
+          requirements_file=full-conda-requirements.txt
+          if [ ! -z "$CONDA_REQUIREMENTS" ]; then
+            echo "Capturing dependencies from $CONDA_REQUIREMENTS"
+            cat $CONDA_REQUIREMENTS >> $requirements_file
+          fi
+          if [ ! -z "$CONDA_REQUIREMENTS_DEV" ]; then
+            echo "Capturing dependencies from $CONDA_REQUIREMENTS_DEV"
+            cat $CONDA_REQUIREMENTS_DEV >> $requirements_file
+          fi
+          if [ "$DEPENDENCIES" == "optional" ]; then
+            echo "Capturing optional dependencies from $CONDA_REQUIREMENTS_OPTIONAL"
+            cat $CONDA_REQUIREMENTS_OPTIONAL >> $requirements_file
+          fi
+          if [ ! -z "$CONDA_INSTALL_EXTRA" ]; then
+            echo "Capturing extra dependencies: $CONDA_INSTALL_EXTRA"
+            echo "# Extra" >> $requirements_file
+            # Use xargs to print one argument per line
+            echo $CONDA_INSTALL_EXTRA | xargs -n 1 >> $requirements_file
+          fi
+          if [ -f $requirements_file ]; then
+            echo "Collected dependencies:"
+            cat $requirements_file
+            echo ""
+            conda install --quiet --file $requirements_file python=$PYTHON
+          else
+            echo "No requirements defined."
+          fi
+
+      - name: List installed packages
+        run: conda list
+
+      - name: Build source and wheel distributions
+        run: |
+          python setup.py sdist bdist_wheel
+          echo ""
+          echo "Generated files:"
+          ls -lh dist/
+
+      - name: Install the package
+        run: pip install --no-deps dist/*.whl
+
+      - name: Build the documentation
+        run: make -C doc clean all
+
+      # Store the docs as a build artifact so we can deploy it later
+      - name: Upload HTML documentation as an artifact
+        uses: actions/upload-artifact@v2
+        with:
+          name: docs-${{ github.sha }}
+          path: doc/_build/html
+
+  #############################################################################
+  # Publish the documentation to gh-pages
+  publish:
+    runs-on: ubuntu-latest
+    needs: build
+    if: github.event_name == 'release' || github.event_name == 'push'
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      # Fetch the built docs from the "build" job
+      - name: Download HTML documentation artifact
+        uses: actions/download-artifact@v2
+        with:
+          name: docs-${{ github.sha }}
+          path: doc/_build/html
+
+      - name: Checkout the gh-pages branch in a separate folder
+        uses: actions/checkout@5a4ac9002d0be2fb38bd78e4b4dbde5606d7042f
+        with:
+          ref: gh-pages
+          # Check out to this folder instead of the current one
+          path: deploy
+          # Download the entire history
+          fetch-depth: 0
+
+      - name: Push the built HTML to gh-pages
+        run: |
+          # Detect if this is a release or from the master branch
+          if [[ "${{ github.event_name }}" == "release" ]]; then
+            # Get the tag name without the "refs/tags/" part
+            # (e.g. refs/tags/v1.2.3 becomes v1.2.3)
+            version="${GITHUB_REF#refs/*/}"
+          else
+            version=dev
+          fi
+          echo "Deploying version: $version"
+          # Make the new commit message. Needs to happen before cd into deploy
+          # to get the right commit hash.
+          message="Deploy $version from $(git rev-parse --short HEAD)"
+          cd deploy
+          # Need to have this file so that GitHub doesn't try to run Jekyll
+          touch .nojekyll
+          # Delete all the files and replace with our new set
+          echo -e "\nRemoving old files from previous builds of ${version}:"
+          rm -rvf ${version}
+          echo -e "\nCopying HTML files to ${version}:"
+          cp -Rvf ../doc/_build/html/ ${version}/
+          # If this is a new release, update the 'latest' link to point to it
+          if [[ "${version}" != "dev" ]]; then
+            echo -e "\nSetting up the link from ${version} to 'latest'."
+            rm -f latest
+            ln -sf ${version} latest
+          fi
+          # Stage the commit
+          git add -A .
+          echo -e "\nChanges to be applied:"
+          git status
+          # Configure git to be the GitHub Actions account
+          git config user.email "github-actions[bot]@users.noreply.github.com"
+          git config user.name "github-actions[bot]"
+          # If this is a dev build and the last commit was from a dev build
+          # (detect if "dev" was in the previous commit message), reuse the
+          # same commit
+          if [[ "${version}" == "dev" && `git log -1 --format='%s'` == *"dev"* ]]; then
+            echo -e "\nAmending last commit:"
+            git commit --amend --reset-author -m "$message"
+          else
+            echo -e "\nMaking a new commit:"
+            git commit -m "$message"
+          fi
+          # Make the push quiet just in case there is anything that could leak
+          # sensitive information.
+          echo -e "\nPushing changes to gh-pages."
+          git push -fq origin gh-pages >/dev/null 2>&1
+          echo -e "\nFinished uploading generated files."
diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml
new file mode 100644
index 000000000..1a67191be
--- /dev/null
+++ b/.github/workflows/pypi.yml
@@ -0,0 +1,91 @@
+# Publish archives to PyPI and TestPyPI using GitHub Actions.
+#
+# NOTE: Pin actions to a specific commit to avoid having the authentication
+# token stolen if the Action is compromised. See the comments and links here:
+# https://github.com/pypa/gh-action-pypi-publish/issues/27
+#
+name: pypi
+
+# Only run for pushes to the master branch and releases.
+on:
+  push:
+    branches:
+      - master
+  release:
+    types:
+      - published
+
+# Use bash by default in all jobs
+defaults:
+  run:
+    shell: bash
+
+jobs:
+  #############################################################################
+  # Publish built wheels and source archives to PyPI and test PyPI
+  publish:
+    runs-on: ubuntu-latest
+    # Only publish from the origin repository, not forks
+    if: github.repository_owner == 'fatiando'
+
+    steps:
+      # Check out your repository under $GITHUB_WORKSPACE
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # Need to fetch more than the last commit so that setuptools_scm can
+          # create the correct version string. If the number of commits since
+          # the last release is greater than this, the version will still be
+          # wrong. Increase if necessary.
+          fetch-depth: 100
+          # The GitHub token is preserved by default but this job doesn't need
+          # to be able to push to GitHub.
+          persist-credentials: false
+
+      # Need the tags so that setuptools-scm can form a valid version number
+      - name: Fetch git tags
+        run: git fetch origin 'refs/tags/*:refs/tags/*'
+
+      - name: Setup Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.8"
+
+      - name: Install requirements
+        run: python -m pip install setuptools twine wheel
+
+      - name: List installed packages
+        run: python -m pip freeze
+
+      - name: Build source and wheel distributions
+        run: |
+          # Change setuptools-scm local_scheme to "no-local-version" so the
+          # local part of the version isn't included, making the version string
+          # compatible with Test PyPI.
+          sed --in-place "s/node-and-date/no-local-version/g" setup.py
+          python setup.py sdist bdist_wheel
+          echo ""
+          echo "Generated files:"
+          ls -lh dist/
+
+      - name: Check the archives
+        run: twine check dist/*
+
+      - name: Publish to Test PyPI
+        if: success()
+        uses: pypa/gh-action-pypi-publish@bce3b74dbf8cc32833ffba9d15f83425c1a736e0
+        with:
+          user: __token__
+          password: ${{ secrets.TEST_PYPI_TOKEN }}
+          repository_url: https://test.pypi.org/legacy/
+          # Allow existing releases on test PyPI without errors.
+          # NOT TO BE USED in PyPI!
+          skip_existing: true
+
+      - name: Publish to PyPI
+        # Only publish to PyPI when a release triggers the build
+        if: success() && github.event_name == 'release'
+        uses: pypa/gh-action-pypi-publish@bce3b74dbf8cc32833ffba9d15f83425c1a736e0
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_TOKEN }}
diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
new file mode 100644
index 000000000..631925dc0
--- /dev/null
+++ b/.github/workflows/style.yml
@@ -0,0 +1,86 @@
+# Linting and style checks with GitHub Actions
+#
+# NOTE: Pin actions to a specific commit to avoid having the authentication
+# token stolen if the Action is compromised. See the comments and links here:
+# https://github.com/pypa/gh-action-pypi-publish/issues/27
+#
+name: code-style
+
+# Only build PRs and the master branch. Pushes to branches will only be built
+# when a PR is opened.
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+
+###############################################################################
jobs:
+  black:
+    name: black [format]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Setup Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.8"
+
+      - name: Install requirements
+        # Quote the requirement so the shell doesn't treat ">=" as a redirection
+        run: pip install "black>=20.8b1"
+
+      - name: List installed packages
+        run: pip freeze
+
+      - name: Check code format
+        run: make black-check
+
+  flake8:
+    name: flake8 [style]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Setup Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.8"
+
+      - name: Install requirements
+        run: pip install flake8
+
+      - name: List installed packages
+        run: pip freeze
+
+      - name: Check code style
+        run: make flake8
+
+  pylint:
+    name: pylint [style]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          persist-credentials: false
+
+      - name: Setup Python
+        uses: actions/setup-python@v2
+        with:
+          python-version: "3.8"
+
+      - name: Install requirements
+        run: pip install "pylint==2.4.*"
+
+      - name: List installed packages
+        run: pip freeze
+
+      - name: Linting (pylint)
+        run: make lint
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 000000000..93415f10c
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,181 @@
+# Run tests and upload to Codecov with GitHub Actions
+#
+# NOTE: Pin actions to a specific commit to avoid having the authentication
+# token stolen if the Action is compromised. See the comments and links here:
+# https://github.com/pypa/gh-action-pypi-publish/issues/27
+#
+name: test
+
+# Only build PRs, the master branch, and releases. Pushes to branches will only
+# be built when a PR is opened. This avoids duplicated builds in PRs coming
+# from branches in the origin repository (1 for PR and 1 for push).
+on:
+  pull_request:
+  push:
+    branches:
+      - master
+  release:
+    types:
+      - published
+
+# Use bash by default in all jobs
+defaults:
+  run:
+    # The -l {0} is necessary for conda environments to be activated
+    # But this breaks on macOS if using actions/setup-python:
+    # https://github.com/actions/setup-python/issues/132
+    shell: bash -l {0}
+
+jobs:
+  #############################################################################
+  # Run tests and upload to codecov
+  test:
+    name: ${{ matrix.os }} py${{ matrix.python }} ${{ matrix.dependencies }}
+    runs-on: ${{ matrix.os }}-latest
+    strategy:
+      # Otherwise, the workflow would stop if a single job fails. We want to
+      # run all of them to catch failures in different combinations.
+      fail-fast: false
+      matrix:
+        os: [ubuntu, macos, windows]
+        python: [3.6, 3.7, 3.8]
+        # If "optional", will install non-required dependencies in the build
+        # environment. Otherwise, only required dependencies are installed.
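+        # For example, the base matrix expands to nine jobs (3 operating
+        # systems x 3 Python versions); the include entries below should add
+        # three more that also test the optional dependencies on Python 3.8.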
+        dependencies: [""]
+      include:
+        - os: ubuntu
+          python: 3.8
+          dependencies: optional
+        - os: macos
+          python: 3.8
+          dependencies: optional
+        - os: windows
+          python: 3.8
+          dependencies: optional
+    env:
+      CONDA_REQUIREMENTS: requirements.txt
+      CONDA_REQUIREMENTS_DEV: requirements-dev.txt
+      CONDA_REQUIREMENTS_OPTIONAL: requirements-optional.txt
+      CONDA_INSTALL_EXTRA:
+      DEPENDENCIES: ${{ matrix.dependencies }}
+      # Used to tag codecov submissions
+      OS: ${{ matrix.os }}
+      PYTHON: ${{ matrix.python }}
+
+    steps:
+      # Cancel any previous run of the test job
+      # We pin the commit hash corresponding to v0.5.0 instead of the tag
+      # because we are giving full access through the github.token.
+      - name: Cancel Previous Runs
+        uses: styfle/cancel-workflow-action@148d9a848c6acaf90a3ec30bc5062f646f8a4163
+        with:
+          access_token: ${{ github.token }}
+
+      # Check out your repository under $GITHUB_WORKSPACE
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          # Need to fetch more than the last commit so that setuptools-scm can
+          # create the correct version string. If the number of commits since
+          # the last release is greater than this, the version will still be
+          # wrong. Increase if necessary.
+          fetch-depth: 100
+          # The GitHub token is preserved by default but this job doesn't need
+          # to be able to push to GitHub.
+          persist-credentials: false
+
+      # Need the tags so that setuptools-scm can form a valid version number
+      - name: Fetch git tags
+        run: git fetch origin 'refs/tags/*:refs/tags/*'
+
+      - name: Setup caching for conda packages
+        uses: actions/cache@v2
+        with:
+          path: ~/conda_pkgs_dir
+          key: conda-${{ runner.os }}-${{ matrix.python }}-${{ hashFiles('requirements*.txt') }}
+
+      - name: Setup Miniconda
+        uses: conda-incubator/setup-miniconda@v2.0.1
+        with:
+          python-version: ${{ matrix.python }}
+          miniconda-version: "latest"
+          auto-update-conda: true
+          channels: conda-forge
+          show-channel-urls: true
+          activate-environment: testing
+          # Needed for caching
+          use-only-tar-bz2: true
+
+      - name: Install requirements
+        run: |
+          requirements_file=full-conda-requirements.txt
+          if [ ! -z "$CONDA_REQUIREMENTS" ]; then
+            echo "Capturing dependencies from $CONDA_REQUIREMENTS"
+            cat $CONDA_REQUIREMENTS >> $requirements_file
+          fi
+          if [ ! -z "$CONDA_REQUIREMENTS_DEV" ]; then
+            echo "Capturing dependencies from $CONDA_REQUIREMENTS_DEV"
+            cat $CONDA_REQUIREMENTS_DEV >> $requirements_file
+          fi
+          if [ "$DEPENDENCIES" == "optional" ]; then
+            echo "Capturing optional dependencies from $CONDA_REQUIREMENTS_OPTIONAL"
+            cat $CONDA_REQUIREMENTS_OPTIONAL >> $requirements_file
+          fi
+          if [ ! -z "$CONDA_INSTALL_EXTRA" ]; then
+            echo "Capturing extra dependencies: $CONDA_INSTALL_EXTRA"
+            echo "# Extra" >> $requirements_file
+            # Use xargs to print one argument per line
+            echo $CONDA_INSTALL_EXTRA | xargs -n 1 >> $requirements_file
+          fi
+          if [ -f $requirements_file ]; then
+            echo "Collected dependencies:"
+            cat $requirements_file
+            echo ""
+            conda install --quiet --file $requirements_file python=$PYTHON
+          else
+            echo "No requirements defined."
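+            # (CONDA_REQUIREMENTS is always set in this job's env, so this
+            # branch is only a safety net.)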
+ fi + + - name: List installed packages + run: conda list + + - name: Build source and wheel distributions + run: | + python setup.py sdist bdist_wheel + echo "" + echo "Generated files:" + ls -lh dist/ + + - name: Install the package + run: pip install --no-deps dist/*.whl + + - name: Copy test data to cache + run: | + echo "Copy data to " ${VERDE_DATA_DIR}/master + set -x -e + mkdir -p ${VERDE_DATA_DIR}/master + cp -r data/* ${VERDE_DATA_DIR}/master + env: + # Define directory where sample data will be copied + VERDE_DATA_DIR: ${{ runner.temp }}/cache/verde + + - name: Run the tests + run: | + ls $VERDE_DATA_DIR + make test + env: + # Define directory where sample data have been copied + VERDE_DATA_DIR: ${{ runner.temp }}/cache/verde + + - name: Convert coverage report to XML for codecov + run: coverage xml + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 + with: + file: ./coverage.xml + env_vars: OS,PYTHON + # Don't mark the job as failed if the upload fails for some reason. + # It does sometimes but shouldn't be the reason for running + # everything again unless something else is broken. + fail_ci_if_error: false diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f417298e6..000000000 --- a/.travis.yml +++ /dev/null @@ -1,126 +0,0 @@ -# Configuration file for TravisCI - -# We use miniconda for Python so don't need any Python specific tools -language: generic - -os: linux -dist: xenial - -# Only build pushes to the master branch and tags. This avoids the double -# builds than happen when working on a branch instead of a fork. -branches: - only: - - master - # Regex to build tagged commits with version numbers - - /\d+\.\d+(\.\d+)?(\S*)?$/ - -# Define environment variables common to all builds -env: - global: - # Encrypted variables - # Github Token for pushing the built HTML (GH_TOKEN) - - secure: "Y8ulfXO+hO9MnChNNkwciZQ7qaj3N3sGhAPe69QPrJxxge8c4BUpQHtNrBZB4aMqxVGMfQQ7SVC4qB64WMEYYbCuD/NUaerS6wLzMXkYcS42xyA/qv7TUSJyjbvqnEz6DV92VyPaQGM6GAk+sY8b1lXoR3v+W2Ee7zxb3Z3fGkiK5gp14kjRlc66xSFr1ShnenlG766vS+NC5oeIGBMEtHNb7XcQNbXGliS1+iFi6eYxeJLa13DQhmyjUVSVSAHyNFvyTb+J5fWHii9cCxO+3GUvt3Geb/AP5wpjbr+V6EYc8Mi86/oMdq+PexsbzayggKQ0HPABvsDrpAHbiPa+TnnRv9MI5oemkzIWdDU/JCBFt0sR4JO3VyyEyXAfdK3Zp+eQvbeXmSYITlYP90fC7iDuz2YYnwyH84LYcMA9lJNAXdBEa4qrrOCawem8l77d7X/dCgiFzzWjwHKgRXyHQZrKN+rWnuVP7X1RhzAjOBkH1uVqevmyqI9Te2X0oM/vRBMk727YX3HeItgVUlvj9kAFI43sSMlN1+WNVyYSV7vjZ2pKNCaJadwwVn67Os8GGGtcQrdosF9fBGGTPcAzGYoGlyxT3u76weoNer/YpYyRW8d4kwyjEIddFEbyaLprnuGe1mMrxaYISoBV69SXzZXKJyaoBaTeRQD5JZeR9Nk=" - # PyPI password for deploying releases (TWINE_PASSWORD) - - secure: "Gvd2kH5bGIng7Wz3R4Md5d48qU0vYo0Sb4g7A1UAn8EOuWAcbkdSAq5yDiAp4pENeGceHQG0+jX+GQBZoSOMUpwAfhPkWG5HBIc+P/G+iTUyF2oELLCekcGgccPzwNgQt574FzM0PkC9L4hINNRjVtnFa+SIx72D2r1OdTvmk2+c4jXBZl52e4l5dU+Hjzwh22KNzAMtXDVuvr3NVdJZHA/ldTwEBUQfiLo2CGkgls6o8ZLixK0tCRGIFKlZko9WeBTzQYidloSo3EQx0eqiTz7qydm3UfCezA9UYPefGOtUaA/4ysqs8tgG8xrnx8NhhRqH9pfPAhgsCMwfmtibslNwH+C7gtbERT8lLY5NfU1xyDC4UxkjbwbzKQno/vPhiqEJ/uR458IdZbzUeWXlt+Rz+Dyj1lW7FqPLOl3Zpfgfv1swWqxjVwduV46c3nlgu9fEkAiEH2SzAtBlsQ2qwbJCZKXj+8Ps9FmaqvQ+SCOTAycgR9WnYoIIutpn0cs3k8zqqQyBq2zXJLkPHflVich8wKKaOsaFMCIKLWaOODCw5fLkfxck/QtlolGGFi3lh5W5p4Zxxr7KdL8f+UrkAb6gY9LStvqwe2rSG2olqc95+zozsMY/YHXTIG092WB3EmptwO9jL67D3AIVBKOdvcRYFetWMyY61ZmEK0s/43I=" - - TWINE_USERNAME=Leonardo.Uieda - - VERDE_DATA_DIR="$HOME/.verde/data" - # The file with the listed requirements to be installed by conda - - CONDA_REQUIREMENTS=requirements.txt - - CONDA_REQUIREMENTS_DEV=requirements-dev.txt - - 
CONDA_INSTALL_EXTRA="codecov" - # These variables control which actions are performed in a build - - DEPLOY_DOCS=false - - DEPLOY_PYPI=false - -# Specify the build configurations. Be sure to only deploy from a single build. -jobs: - include: - - name: "Linux - Python 3.8" - os: linux - env: - - PYTHON=3.8 - - name: "Linux - Python 3.7" - os: linux - env: - - PYTHON=3.7 - # Main build to deploy to PyPI and Github pages - - name: "Linux - Python 3.6 (deploy)" - os: linux - env: - - PYTHON=3.6 - - DEPLOY_DOCS=true - - DEPLOY_PYPI=true - - CONDA_INSTALL_EXTRA="codecov twine" - # Check tests pass when using optional dependencies. - - name: "Linux - Python 3.7 (optional deps)" - os: linux - env: - - PYTHON=3.7 - - DASK_SCHEDULER=synchronous - - CONDA_INSTALL_EXTRA="codecov pykdtree numba" - - name: "Linux - Python 3.6 (optional deps)" - os: linux - env: - - PYTHON=3.6 - - DASK_SCHEDULER=synchronous - - CONDA_INSTALL_EXTRA="codecov pykdtree numba" - -# Setup the build environment -before_install: - # Copy sample data to the verde data dir to avoid downloading all the time - - mkdir -p ${VERDE_DATA_DIR}/master - - cp -r data/* ${VERDE_DATA_DIR}/master - # Get the Fatiando CI scripts - - git clone --branch=1.2.0 --depth=1 https://github.com/fatiando/continuous-integration.git - # Download and install miniconda and setup dependencies - # Need to source the script to set the PATH variable globaly - - source continuous-integration/travis/setup-miniconda.sh - # Show installed pkg information for postmortem diagnostic - - conda list - -# Install the package that we want to test -install: - # Make a binary wheel for our package and install it - - python setup.py bdist_wheel - - pip install dist/* - -# Run the actual tests and checks -script: - # Run the test suite - - make test - # Build the documentation - - make -C doc clean all - -# Things to do if the build is successful -after_success: - # Upload coverage information - - coverage xml - - echo "Uploading coverage to Codecov" - - codecov -e PYTHON - -# Deploy -deploy: - # Make a release on PyPI - - provider: script - script: continuous-integration/travis/deploy-pypi.sh - on: - tags: true - condition: '$DEPLOY_PYPI == "true"' - # Push the built HTML in doc/_build/html to the gh-pages branch - - provider: script - script: continuous-integration/travis/deploy-gh-pages.sh - skip_cleanup: true - on: - branch: master - condition: '$DEPLOY_DOCS == "true"' - # Push HTML when building tags as well - - provider: script - script: continuous-integration/travis/deploy-gh-pages.sh - skip_cleanup: true - on: - tags: true - condition: '$DEPLOY_DOCS == "true"' - -# Don't send out emails every time a build fails -notifications: - email: false diff --git a/Makefile b/Makefile index 9fc244be8..52c23ed05 100644 --- a/Makefile +++ b/Makefile @@ -30,8 +30,12 @@ test: format: black $(BLACK_FILES) -check: +check: black-check flake8 + +black-check: black --check $(BLACK_FILES) + +flake8: flake8 $(FLAKE8_FILES) lint: diff --git a/requirements-optional.txt b/requirements-optional.txt new file mode 100644 index 000000000..a28d04a43 --- /dev/null +++ b/requirements-optional.txt @@ -0,0 +1,2 @@ +numba +pykdtree