Merge branch 'release/3.10' into task/stormwater

This commit is contained in:
emlys 2021-08-05 15:15:31 -06:00
commit d89eac6005
4 changed files with 116 additions and 17 deletions

View File

@@ -125,17 +125,24 @@ jobs:
- name: Upload installer artifact
if: always()
uses: actions/upload-artifact@v2-preview
uses: actions/upload-artifact@v2
with:
name: InVEST-windows-installer
path: dist/*.exe
- name: Upload user's guide artifact
uses: actions/upload-artifact@v2-preview
uses: actions/upload-artifact@v2
with:
name: InVEST-user-guide
path: dist/InVEST_*_userguide.zip
- name: Upload workspace on failure
if: ${{ failure() }}
uses: actions/upload-artifact@v2
with:
name: InVEST-failed-windows-workspace
path: ${{ github.workspace }}
build-mac-binaries:
name: "Build mac binaries"
needs: check-syntax-errors
@@ -214,17 +221,29 @@ jobs:
run: make deploy
- name: Upload binaries artifact
uses: actions/upload-artifact@v2-preview
uses: actions/upload-artifact@v2
with:
name: InVEST-mac-binaries
path: dist/InVEST-*-mac.zip
- name: Upload DMG artifact
uses: actions/upload-artifact@v2-preview
uses: actions/upload-artifact@v2
with:
name: InVEST-dmg
path: dist/InVEST*.dmg
- name: Tar the workspace to preserve permissions
if: ${{ failure() }}
shell: bash -l {0}
run: tar -cvf InVEST-failed-mac-workspace.tar ${{ github.workspace }}
- name: Upload workspace on failure
if: ${{ failure() }}
uses: actions/upload-artifact@v2
with:
name: InVEST-failed-mac-workspace
path: InVEST-failed-mac-workspace.tar
build-sampledata:
name: Build sampledata archives
needs: check-syntax-errors
@@ -259,7 +278,7 @@ jobs:
run: make deploy
- name: Upload sample data artifact
uses: actions/upload-artifact@v2-preview
uses: actions/upload-artifact@v2
with:
name: InVEST-sample-data
path: dist/*.zip

View File

@@ -12,3 +12,10 @@ if platform.system() == 'Darwin':
# See https://bugreports.qt.io/browse/QTBUG-87014
# and https://github.com/natcap/invest/issues/384
os.environ['QT_MAC_WANTS_LAYER'] = '1'
# Rtree will look in this directory first for libspatialindex_c.dylib.
# In response to issues with github mac binary builds:
# https://github.com/natcap/invest/issues/594
# sys._MEIPASS is the path to where the pyinstaller entrypoint bundle
# lives. See the pyinstaller docs for more details.
os.environ['SPATIALINDEX_C_LIBRARY'] = sys._MEIPASS

View File

@@ -54,6 +54,10 @@ echo "# binary to run on OSX Big Sur." >> $new_command_file
echo "#" >> $new_command_file
echo "# Taken from https://stackoverflow.com/a/246128/299084" >> $new_command_file
echo 'DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"' >> $new_command_file
echo 'QT_MAC_WANTS_LAYER=1 "$DIR/invest_dist/invest" launch' >> $new_command_file
# QT_MAC_WANTS_LAYER is to address a hanging issue in Qt 5.13 with launching Qt
# on OS 11 Big Sur.
# QT_QPA_PLATFORM_PLUGIN_PATH addresses an issue where sometimes Qt can't find
# the plugins where they are expected to be.
echo 'QT_MAC_WANTS_LAYER=1 QT_QPA_PLATFORM_PLUGIN_PATH=$DIR/invest_dist/PySide2/Qt/plugins "$DIR/invest_dist/invest" launch' >> $new_command_file
chmod a+x $new_command_file

View File

@@ -713,26 +713,93 @@ class CoastalVulnerabilityTests(unittest.TestCase):
args['slr_field'] = 'Trend'
coastal_vulnerability.execute(args)
actual_values_df = pandas.read_csv(
os.path.join(args['workspace_dir'], 'coastal_exposure.csv'))
expected_values_df = pandas.read_csv(
os.path.join(REGRESSION_DATA, 'expected_coastal_exposure.csv'))
pandas.testing.assert_frame_equal(
actual_values_df, expected_values_df, check_dtype=False)
results_vector = gdal.OpenEx(
os.path.join(args['workspace_dir'], 'coastal_exposure.gpkg'),
gdal.OF_VECTOR)
results_layer = results_vector.GetLayer()
# shore points aren't necessarily created/numbered in the same order,
# so we can't compare by the fid or shore_id fields
# instead check that each point has the expected attributes
fields = [
'R_hab', 'R_wind', 'R_wave', 'R_surge', 'R_relief', 'R_geomorph',
'R_slr', 'population', 'exposure', 'habitat_role',
'exposure_no_habitats']
# sorted list of expected x, y point coords and their corresponding expected
# values for the fields in the order above
sorted_expected_data = [
[(252692.2222200262, 5482540.950248547),
[5, 3, 2, 1, 4, 1, 5, 0, 2.49389836245, 0, 2.49389836245]],
[(265195.9118871244, 5465342.359978485),
[4.05, 5, 5, 1, 5, 1, 5, 0, 3.063308387, 0.0936167899, 3.1569251777]],
[(268404.5296770451, 5479539.256472221),
[5, 2, 2, 1, 1, 1, 5, 0, 1.9306977288, 0, 1.9306977288]],
[(275802.584962168, 5469283.47691652),
[5, 3, 1, 1, 5, 1, 4, 0, 2.2587827631, 0, 2.2587827631]],
[(286971.1600469994, 5475571.525391179),
[5, 1, 1, 3, 1, 1, 4, 0, 1.7948229213, 0, 1.7948229213]],
[(290844.10673833836, 5460752.369983306),
[5, 4, 2, 2, 2, 1, 4, 0, 2.5169979012, 0, 2.5169979012]],
[(300130.0010361069, 5437435.4230010025),
[1.7999999, 4, 5, 2, 5, 1, 3, 0, 2.712353253, 0.426215219066, 3.138568472]],
[(306112.5574508078, 5450095.8865171345),
[5, 1, 3, 3, 2, 1, 3, 0, 2.225039271, 0, 2.225039271]],
[(318255.8192637532, 5423278.964182078),
[5, 4, 3, 2, 3, 2.5, 1, 0, 2.6426195539, 0, 2.6426195539]],
[(339388.60793905176, 5428895.077843302),
[5, 3, 4, 4, 2, 4, 1, 0, 2.94471340036, 0, 2.94471340036]],
[(344736.25795214524, 5411347.428706937),
[5, 2, 4, 3, 3, 4, 1, 0, 2.8261463109, 0, 2.8261463109]],
[(355807.04065901926, 5394736.771414153),
[5, 5, 4, 4, 3, 4, 2, 0, 3.70591872429, 0, 3.70591872429]],
[(361290.81087879254, 5427975.474574804),
[5, 1, 3, 5, 1, 4, 2, 0, 2.493898362454, 0, 2.493898362454]],
[(361341.1463245464, 5433678.995435326),
[5, 2, 1, 5, 1, 4, 1, 0, 2.1316631165, 0, 2.1316631165]],
[(368269.9048903524, 5449541.99543876),
[5, 1, 1, 5, 2, 4, 2, 0.008, 2.3535468936, 0, 2.3535468936]],
[(376479.9819538344, 5383636.197270025),
[4.05, 5, 5, 4, 4, 4, 3, numpy.nan, 4.0989337064, 0.125266204816, 4.2241999112]]
]
actual_data = []
for feature in results_layer:
geom = feature.GetGeometryRef()
x = geom.GetX()
y = geom.GetY()
actual_row = []
for field in fields:
value = feature.GetField(field)
if value is None:
actual_row.append(numpy.nan)
else:
actual_row.append(value)
actual_data.append([(x, y), actual_row])
# the coords and corresponding field values should be close to expected
sorted_actual_data = sorted(actual_data)
for actual, expected in zip(sorted_actual_data, sorted_expected_data):
actual_coords, actual_data = actual[0], actual[1:]
expected_coords, expected_data = expected[0], expected[1:]
numpy.testing.assert_allclose(actual_coords, expected_coords)
numpy.testing.assert_allclose(actual_data, expected_data)
# Also expect matching shore_id field in all tabular outputs:
intermediate_csv = pandas.read_csv(
actual_values_df = pandas.read_csv(
os.path.join(args['workspace_dir'], 'coastal_exposure.csv'))
intermediate_df = pandas.read_csv(
os.path.join(
args['workspace_dir'],
'intermediate/intermediate_exposure.csv'))
habitat_csv = pandas.read_csv(
habitat_df = pandas.read_csv(
os.path.join(
args['workspace_dir'],
'intermediate/habitats/habitat_protection.csv'))
pandas.testing.assert_series_equal(
actual_values_df['shore_id'], intermediate_csv['shore_id'])
actual_values_df['shore_id'], intermediate_df['shore_id'])
pandas.testing.assert_series_equal(
actual_values_df['shore_id'], habitat_csv['shore_id'])
actual_values_df['shore_id'], habitat_df['shore_id'])
def test_final_risk_calc(self):
"""CV: regression test for the final risk score calculation."""
@@ -1112,7 +1179,8 @@ class CoastalVulnerabilityTests(unittest.TestCase):
landmass_path = os.path.join(
self.workspace_dir, 'landmass.geojson')
pygeoprocessing.shapely_geometry_to_vector(
[Polygon([(-100, -100), (100, -100), (100, 100), (-100, 100), (-100, -100)])],
[Polygon([(-100, -100), (100, -100), (100, 100),
(-100, 100), (-100, -100)])],
landmass_path, wkt, 'GeoJSON')
model_resolution = 100
@@ -1390,6 +1458,7 @@ def assert_pickled_arrays_almost_equal(
class CoastalVulnerabilityValidationTests(unittest.TestCase):
"""Tests for the CV Model ARGS_SPEC and validation."""
def setUp(self):
"""Create a temporary workspace."""
self.workspace_dir = tempfile.mkdtemp()