invest/.github/workflows/build-and-test.yml

name: Run Tests, Build Python Distributions
on:
  push:
  pull_request:
jobs:
  # This job will check for obvious syntax errors.
  # The other jobs in this file depend on this one and won't show up in the
  # jobs list on actions until this one completes successfully.
  # Best to run in linux ... windows won't honor nonzero exit statuses
  # by default.
  check-syntax-errors:
    name: "Check for syntax errors"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
        with:
          # Fetch complete history for accurate versioning
          fetch-depth: 0
      - name: Set up python 3.9
        uses: actions/setup-python@v1
        with:
          python-version: 3.9
      - name: Set up environment
        run: python -m pip install --upgrade pip setuptools setuptools_scm flake8
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
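          # The selected codes cover fatal problems only, roughly: E9 = syntax
          # and indentation errors, F63 = invalid comparisons/assertions,
          # F7 = misplaced statements (e.g. 'break' outside a loop),
          # F82 = undefined names.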
          python -m flake8 src --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          python -m flake8 src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
  run-model-tests:
    name: "Run model tests"
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        python-version: [3.7, 3.8, 3.9]
        python-architecture: [x64]
        os: [windows-latest, macos-latest]
        include:
          - python-version: 3.7
            numpy: "numpy=1.15"  # fuzzy assertion in conda is single '='
            gdal: "gdal==3.1.2"
          - python-version: 3.8
            numpy: "numpy=1.16"
            gdal: "gdal==3.2.0"
          - python-version: 3.9
            numpy: "numpy=1.20"
            gdal: "gdal==3.3.1"
    steps:
      - uses: actions/checkout@v2
        with:
          # Fetch complete history for accurate versioning
          fetch-depth: 0
      # Taken from https://github.com/actions/cache/blob/master/examples.md#python---pip
      - name: Restore pip cache
        uses: actions/cache@v2
        with:
          path: ~\AppData\Local\pip\Cache
          key: ${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-${{ hashFiles('**/requirements*.txt') }}-model-tests
      # NOTE: It takes twice as long to save the sample data cache
      # as it does to do a fresh clone (almost 5 minutes vs. 2.5 minutes)
      # Test data is way, way faster by contrast (on the order of a few
      # seconds to archive).
      - name: Restore git-LFS test data cache
        uses: actions/cache@v2
        with:
          path: data/invest-test-data
          key: git-lfs-testdata-${{ hashFiles('Makefile') }}
      - name: Setup conda environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: ${{ matrix.os }}-test-env
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          channels: conda-forge
      - name: Install build dependencies
        shell: bash -l {0}
        run: |
          conda install nomkl  # make sure numpy is w/out MKL
          conda upgrade -y pip setuptools
          conda install ${{ matrix.numpy }} ${{ matrix.gdal }} toml twine
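          # Install the build requirements that pyproject.toml declares under
          # [build-system] 'requires' with conda, rather than letting pip
          # fetch them at build time.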
          conda install $(python -c "import toml;print(' '.join(toml.load('pyproject.toml')['build-system']['requires']))")
      - name: Build wheel
        shell: bash -l {0}
        run: |
          python setup.py bdist_wheel
          python -m twine check dist/*
      - name: Install runtime dependencies
        shell: bash -l {0}
        run: |
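          # Translate the pip-style requirements files into a conda
          # environment file, then update the active conda environment from it.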
          python ./scripts/convert-requirements-to-conda-yml.py \
            requirements.txt requirements-dev.txt \
            requirements-gui.txt > requirements-all.yml
          conda env update --file requirements-all.yml
      - name: Conda Environment Listing
        shell: bash -l {0}
        run: |
          conda list
          conda list --export > conda-env.txt
      - name: Download Conda Env Artifact
        continue-on-error: true
        # Using 'dawidd6' since 'download-artifact' GH action doesn't
        # support downloading artifacts from prior workflow runs
        uses: dawidd6/action-download-artifact@v2
        with:
          workflow: build-and-test.yml
          # Get frozen conda env artifact from last successful workflow
          workflow_conclusion: success
          name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }} ${{ matrix.python-architecture }}
          path: ./conda-env-artifact
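      # Diff the current environment listing against the one from the last
      # successful run. Both this step and the download above use
      # continue-on-error, so a missing artifact or a changed environment is
      # informational only and never fails the build.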
      - name: Compare Conda Environments
        continue-on-error: true
        shell: bash -l {0}
        run: |
          diff ./conda-env.txt ./conda-env-artifact/conda-env.txt
      - name: Run model tests
        shell: bash -l {0}
        run: |
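          # Install the wheel built in the 'Build wheel' step so the model
          # tests exercise the built package.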
          pip install $(find dist -name "natcap.invest*.whl")
          make test
      - uses: actions/upload-artifact@v2
        with:
          name: Wheel for ${{ matrix.os }} ${{ matrix.python-version }} ${{ matrix.python-architecture }}
          path: dist
      - uses: actions/upload-artifact@v2
        with:
          name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }} ${{ matrix.python-architecture }}
          path: ./conda-env.txt
      - name: Set up GCP
        # Secrets not available in PR so don't use GCP.
        if: github.event_name != 'pull_request'
        uses: google-github-actions/setup-gcloud@master
        with:
          service_account_key: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Deploy artifacts to GCS
        # Secrets not available in PR so don't use GCP.
        if: github.event_name != 'pull_request'
        shell: bash -l {0}
        run: |
          # Specify which python version we want to use (it's the one
          # we're using in actions/setup-miniconda)
          export CLOUDSDK_PYTHON=$(which python)
          export CLOUDSDK_GSUTIL_PYTHON=$(which python)
          # setup-gcloud action adds 'gsutil' to the PATH
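          # Setting GSUTIL="gsutil" overrides the Makefile's GSUTIL variable
          # so the deploy target calls the gsutil found on the PATH.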
          make GSUTIL="gsutil" deploy
  test-source-distribution:
    name: "Check sdist"
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9]
        python-architecture: [x64]
        os: [windows-latest, macos-latest]
    steps:
      - uses: actions/checkout@v2
      - name: Fetch git tags
        run: git fetch origin +refs/tags/*:refs/tags/*
      - name: Setup conda environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: source-env
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          channels: conda-forge
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          conda install nomkl  # make sure numpy is w/out MKL
          conda upgrade -y pip setuptools
          conda install toml twine
          conda install $(python -c "import toml;print(' '.join(toml.load('pyproject.toml')['build-system']['requires']))")
          python ./scripts/convert-requirements-to-conda-yml.py \
            requirements.txt > requirements-all.yml
          conda env update --file requirements-all.yml
      - name: Build source distribution
        shell: bash -l {0}
        run: |
          python setup.py build_ext sdist
          python -m twine check dist/*
      - name: Install from source distribution
        shell: bash -l {0}
        run: |
          # Prevent pip from thinking that CWD is a natcap.invest
          # installation. It's not.
          rm -r natcap.invest.egg-info
          # Install natcap.invest from the sdist in dist/
          pip install $(find dist -name "natcap.invest*")
          # Model tests already cover model functionality; here we only need
          # to confirm that natcap.invest builds from source and imports.
          python -c "from natcap.invest import *"
      - uses: actions/upload-artifact@v2
        with:
          name: Source distribution
          path: dist
      - name: Set up GCP
        # Secrets not available in PR so don't use GCP.
        if: github.event_name != 'pull_request'
        uses: google-github-actions/setup-gcloud@master
        with:
          service_account_key: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
      - name: Deploy artifacts to GCS
        # Secrets not available in PR so don't use GCP.
        # Only upload sdist in one of the matrix cases so we don't
        # overwrite artifacts or have duplicates (mac/windows sdists have
        # different extensions)
        if: github.event_name != 'pull_request' && matrix.os == 'macos-latest' && matrix.python-version == '3.7'
        shell: bash -l {0}
        run: |
          # Specify which python version we want to use (it's the one
          # we're using in actions/setup-miniconda)
          export CLOUDSDK_PYTHON=$(which python)
          export CLOUDSDK_GSUTIL_PYTHON=$(which python)
          # setup-gcloud action adds 'gsutil' to the PATH
          make GSUTIL="gsutil" deploy
  validate-sampledata:
    name: "Validate sample datastacks"
    runs-on: windows-latest
    needs: check-syntax-errors
    steps:
      - uses: actions/checkout@v2
        with:
          # Fetch complete history for accurate versioning
          fetch-depth: 0
      - name: Restore pip cache
        uses: actions/cache@v2
        with:
          path: ~\AppData\Local\pip\Cache
          key: windows-py37-pipcache-sampledata-${{ hashFiles('requirements.txt') }}
      - name: Setup conda environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: validate-env
          auto-update-conda: true
          python-version: 3.9
          channels: conda-forge
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          conda install nomkl  # make sure numpy w/out mkl
          conda install pytest requests
          conda install $(python -c "import toml;print(' '.join(toml.load('pyproject.toml')['build-system']['requires']))")
          conda upgrade -y pip wheel
          python ./scripts/convert-requirements-to-conda-yml.py requirements.txt > requirements.yml
          conda env update --file requirements.yml
          python setup.py install
      - name: Validate sample data
        shell: bash -l {0}
        run: make validate_sampledata
  run-ui-tests:
    name: "Run UI Tests"
    runs-on: ${{ matrix.os }}
    needs: check-syntax-errors
    strategy:
      fail-fast: false
      max-parallel: 4
      matrix:
        python-version: [3.7, 3.8, 3.9]
        python-architecture: [x64]
        os: [windows-latest]
    steps:
      - uses: actions/checkout@v2
        with:
          # Fetch complete history for accurate versioning
          fetch-depth: 0
      # Taken from https://github.com/actions/cache/blob/master/examples.md#python---pip
      - name: Restore pip cache
        uses: actions/cache@v2
        with:
          path: ~\AppData\Local\pip\Cache
          key: ${{ runner.os }}-py${{ matrix.python-version }}-${{ matrix.python-architecture }}-pip-${{ hashFiles('**/requirements*.txt') }}-ui-tests
      - name: Restore git-LFS test data cache
        uses: actions/cache@v2
        with:
          path: data/invest-test-data
          key: git-lfs-testdata-${{ hashFiles('Makefile') }}
      - name: Setup conda environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: ui-env
          auto-update-conda: true
          python-version: ${{ matrix.python-version }}
          channels: conda-forge
      - name: Install dependencies
        shell: bash -l {0}
        run: |
          conda install nomkl  # make sure numpy w/out mkl
          conda install requests
          conda upgrade -y pip setuptools
          python ./scripts/convert-requirements-to-conda-yml.py \
            requirements.txt requirements-dev.txt \
            requirements-gui.txt > requirements-all.yml
          conda env update --file requirements-all.yml
          python setup.py install
      - name: Run UI tests
        shell: bash -l {0}
        timeout-minutes: 10  # tests usually take < 2 minutes, so 10 is generous.
        run: make test_ui