"""Module for Regression Testing the InVEST Coastal Vulnerability module."""
|
|
import json
|
|
import os
|
|
import pickle
|
|
import shutil
|
|
import tempfile
|
|
import unittest
|
|
|
|
import numpy.testing
|
|
import pandas.testing
|
|
import pygeoprocessing
|
|
import shapely.wkb
|
|
import taskgraph
|
|
from osgeo import gdal
|
|
from osgeo import ogr
|
|
from osgeo import osr
|
|
from shapely.geometry import LineString
|
|
from shapely.geometry import MultiLineString
|
|
from shapely.geometry import MultiPolygon
|
|
from shapely.geometry import Point
|
|
from shapely.geometry import Polygon
|
|
|
|
REGRESSION_DATA = os.path.join(
|
|
os.path.dirname(__file__), '..', 'data', 'invest-test-data',
|
|
'coastal_vulnerability')
|
|
INPUT_DATA = os.path.join(
|
|
os.path.dirname(__file__), '..', 'data', 'invest-test-data',
|
|
'coastal_vulnerability', 'input')
|
|
|
|
|
|
def _mean_op(array, mask):
|
|
if numpy.any(mask):
|
|
return numpy.mean(array[mask])
|
|
return numpy.nan
|
|
|
|
|
|
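

# _mean_op is the aggregation callback handed to
# coastal_vulnerability._aggregate_raster_values_in_radius in the tests
# below; for example, _mean_op(numpy.array([1.0, 2.0, 3.0]),
# numpy.array([True, True, False])) returns 1.5.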
class CoastalVulnerabilityTests(unittest.TestCase):
    """Tests for the Coastal Vulnerability Model."""

    def setUp(self):
        """Overriding setUp function to create temp workspace directory."""
        # This lets us delete the workspace after it's done, no matter
        # the test result.
        self.workspace_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Overriding tearDown function to remove temporary directory."""
        shutil.rmtree(self.workspace_dir)

    @staticmethod
    def generate_base_args(workspace_dir):
        """Generate an args dict with default required args."""
        args = {
            'workspace_dir': workspace_dir,
            'n_workers': -1,
            'wwiii_vector_path': os.path.join(
                INPUT_DATA, 'WaveWatchIII_subset.shp'),
            'landmass_vector_path': os.path.join(
                INPUT_DATA, 'land_polygon_simple_utm.shp'),
            'aoi_vector_path': os.path.join(
                INPUT_DATA, 'AOI_BarkClay.shp'),
            'model_resolution': 25000,
            'max_fetch_distance': 12000,
            'dem_path': os.path.join(
                INPUT_DATA, 'dem_wgs84.tif'),
            'dem_averaging_radius': 33000.0,
            'bathymetry_raster_path': os.path.join(
                INPUT_DATA, 'dem_wgs84.tif'),
            'habitat_table_path': os.path.join(
                INPUT_DATA, 'natural_habitats_wcvi.csv'),
            'shelf_contour_vector_path': os.path.join(
                INPUT_DATA, 'continental_shelf_contour.gpkg'),
        }
        return args

    def test_wind_and_wave_exposure(self):
        """CV: regression test for wind and wave exposure values.

        The wave calculation depends on an intermediate product from
        the wind calculation, so I'm testing them both in this scope.
        """
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        aoi_path = os.path.join(INPUT_DATA, 'AOI_BarkClay.shp')

        # these points have the WWIII values interpolated onto them.
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        max_fetch_distance = 12000
        target_fetch_point_vector_path = os.path.join(
            workspace_dir, 'fetch_points.gpkg')
        target_fetch_rays_vector_path = os.path.join(
            workspace_dir, 'fetch_rays.gpkg')
        target_wind_exposure_pickle_path = os.path.join(
            workspace_dir, 'wind.pickle')

        landmass_vector_path = os.path.join(
            INPUT_DATA, 'land_polygon_simple_utm.shp')
        landmass_polygon_pickle_path = os.path.join(
            workspace_dir, 'polygon.pickle')
        landmass_lines_pickle_path = os.path.join(
            workspace_dir, 'lines.pickle')
        landmass_lines_rtree_path = os.path.join(
            workspace_dir, 'lines_rtree.dat')

        base_bathy_raster_path = os.path.join(INPUT_DATA, 'dem_wgs84.tif')
        target_bathy_raster_path = os.path.join(
            workspace_dir, 'negative_bathymetry.tif')
        proxy_vector_info = pygeoprocessing.get_vector_info(
            base_shore_point_vector_path)
        proxy_srs_wkt = proxy_vector_info['projection_wkt']
        proxy_bounding_box = proxy_vector_info['bounding_box']
        # add the max_fetch_distance to the bounding box so we can use
        # the clipped raster in the fetch-ray-depth routine.
        model_resolution = 5000
        # add the model resolution, to be safe
        fetch_buffer = max_fetch_distance + model_resolution
        proxy_bounding_box[0] -= fetch_buffer
        proxy_bounding_box[1] -= fetch_buffer
        proxy_bounding_box[2] += fetch_buffer
        proxy_bounding_box[3] += fetch_buffer

        coastal_vulnerability.warp_and_mask_bathymetry(
            base_bathy_raster_path, proxy_srs_wkt, proxy_bounding_box,
            model_resolution, workspace_dir, '',
            target_bathy_raster_path)

        target_shore_points_path = os.path.join(
            workspace_dir, 'tmp-shore-pts.gpkg')

        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_vector_path, model_resolution,
            target_shore_points_path, landmass_polygon_pickle_path,
            landmass_lines_pickle_path, landmass_lines_rtree_path)

        coastal_vulnerability.calculate_wind_exposure(
            base_shore_point_vector_path,
            landmass_polygon_pickle_path,
            landmass_lines_rtree_path,
            landmass_lines_pickle_path,
            target_bathy_raster_path,
            target_fetch_rays_vector_path,
            max_fetch_distance,
            target_fetch_point_vector_path,
            target_wind_exposure_pickle_path)

        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_wind.json')
        assert_pickled_arrays_almost_equal(
            target_wind_exposure_pickle_path, expected_raw_values_path)

        target_wave_exposure_pickle_path = os.path.join(
            workspace_dir, 'wave.pickle')
        coastal_vulnerability.calculate_wave_exposure(
            target_fetch_point_vector_path, max_fetch_distance,
            os.path.join(workspace_dir, 'intermediate_wave.gpkg'),
            target_wave_exposure_pickle_path)

        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_wave.json')
        assert_pickled_arrays_almost_equal(
            target_wave_exposure_pickle_path, expected_raw_values_path)

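    # warp_and_mask_bathymetry is expected to supply a nodata value of its
    # own when the source raster defines none (presumably a float default);
    # the next test exercises that fallback path.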
    def test_warp_and_mask_bathymetry_with_no_nodata(self):
        """CV: warp_and_mask_bathymetry when bathy has no nodata."""
        from natcap.invest import coastal_vulnerability
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        workspace_dir = self.workspace_dir
        base_bathy_raster_path = os.path.join(INPUT_DATA, 'dem_wgs84.tif')

        # Copy the raster over to the workspace and unset the nodata value
        # there.
        no_nodata_bathy_raster_path = os.path.join(
            workspace_dir, 'no_nodata_dem_wgs84.tif')
        shutil.copyfile(base_bathy_raster_path, no_nodata_bathy_raster_path)
        try:
            raster = gdal.OpenEx(no_nodata_bathy_raster_path, gdal.GA_Update)
            band = raster.GetRasterBand(1)
            band.DeleteNoDataValue()
        finally:
            band = None
            raster = None
        self.assertEqual(  # Make sure we correctly deleted the nodata value.
            pygeoprocessing.get_raster_info(
                no_nodata_bathy_raster_path)['nodata'][0],
            None)
        target_bathy_raster_path = os.path.join(
            workspace_dir, 'negative_bathymetry.tif')
        proxy_vector_info = pygeoprocessing.get_vector_info(
            base_shore_point_vector_path)
        proxy_srs_wkt = proxy_vector_info['projection_wkt']
        proxy_bounding_box = proxy_vector_info['bounding_box']
        # add the max_fetch_distance to the bounding box so we can use
        # the clipped raster in the fetch-ray-depth routine.
        model_resolution = 5000
        # add the model resolution, to be safe
        max_fetch_distance = 12000
        fetch_buffer = max_fetch_distance + model_resolution
        proxy_bounding_box[0] -= fetch_buffer
        proxy_bounding_box[1] -= fetch_buffer
        proxy_bounding_box[2] += fetch_buffer
        proxy_bounding_box[3] += fetch_buffer

        coastal_vulnerability.warp_and_mask_bathymetry(
            no_nodata_bathy_raster_path, proxy_srs_wkt, proxy_bounding_box,
            model_resolution, workspace_dir, '',
            target_bathy_raster_path)

        # Check that the new raster has a nodata value set.
        self.assertNotEqual(
            pygeoprocessing.get_raster_info(
                target_bathy_raster_path)['nodata'][0],
            None)

    def test_extract_bathymetry(self):
        """CV: regression test for extracting bathymetry along ray."""
        from natcap.invest import coastal_vulnerability
        # Make a simple raster
        raster_path = os.path.join(self.workspace_dir, 'foo.tif')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        geotransform = [0, 1.0, 0.0, 0, 0.0, -1.0]
        n = 5
        band1_nodata = numpy.nan
        gtiff_driver = gdal.GetDriverByName('GTiff')
        new_raster = gtiff_driver.Create(
            raster_path, n, n, 2, gdal.GDT_Float32, options=[
                'TILED=YES', 'BIGTIFF=YES', 'COMPRESS=LZW',
                'BLOCKXSIZE=16', 'BLOCKYSIZE=16'])
        new_raster.SetProjection(projection_wkt)
        new_raster.SetGeoTransform(geotransform)
        valid_value = -10.0
        array = numpy.array([valid_value] * n * n).reshape((n, n))

        # nodata across the top row for Band 1
        new_band = new_raster.GetRasterBand(1)
        array[:1] = band1_nodata
        new_band.WriteArray(array)
        new_band.SetNoDataValue(band1_nodata)

        # all nodata for Band 2
        band2_nodata = 999.99
        nodata_band = new_raster.GetRasterBand(2)
        array[:] = band2_nodata
        nodata_band.WriteArray(array)
        nodata_band.SetNoDataValue(band2_nodata)

        new_raster.FlushCache()
        new_band = None
        nodata_band = None
        new_raster = None

        # Ray is only over valid pixels
        all_valid_ray = ogr.Geometry(ogr.wkbLineString)
        all_valid_ray.AddPoint(0, -2)
        all_valid_ray.AddPoint(3, -4)

        # Ray is over some valid and some nodata pixels
        some_nodata_ray = ogr.Geometry(ogr.wkbLineString)
        some_nodata_ray.AddPoint(0, 0)
        some_nodata_ray.AddPoint(4, -4)

        # Ray is only over nodata pixels,
        # so the final extract window will be expanded as needed
        all_nodata_ray = ogr.Geometry(ogr.wkbLineString)
        all_nodata_ray.AddPoint(-0.5, 0.5)
        all_nodata_ray.AddPoint(3.5, 0.5)

        # Ray is partially outside the bounds of the raster
        out_of_bounds_ray = ogr.Geometry(ogr.wkbLineString)
        out_of_bounds_ray.AddPoint(3, -3)
        out_of_bounds_ray.AddPoint(3, -7)

        raster = gdal.OpenEx(raster_path, gdal.OF_RASTER)
        band = raster.GetRasterBand(1)  # nodata across top row

        values = coastal_vulnerability.extract_bathymetry_along_ray(
            all_valid_ray, geotransform, band1_nodata, band)
        self.assertEqual(numpy.mean(values), valid_value)

        values = coastal_vulnerability.extract_bathymetry_along_ray(
            some_nodata_ray, geotransform, band1_nodata, band)
        self.assertEqual(numpy.mean(values), valid_value)

        values = coastal_vulnerability.extract_bathymetry_along_ray(
            all_nodata_ray, geotransform, band1_nodata, band)
        self.assertEqual(numpy.mean(values), valid_value)

        with self.assertRaises(ValueError):
            values = coastal_vulnerability.extract_bathymetry_along_ray(
                out_of_bounds_ray, geotransform, band1_nodata, band)

        nodata_band = raster.GetRasterBand(2)  # all nodata band
        with self.assertRaises(ValueError):
            values = coastal_vulnerability.extract_bathymetry_along_ray(
                all_valid_ray, geotransform, band2_nodata, nodata_band)

        raster = None
        band = None
        nodata_band = None

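    # The expected height and period below are pinned regression values;
    # presumably they come from the model's fetch-limited wave-growth
    # equations (functions of wind speed, fetch distance, and depth), with
    # wind speeds below 1 m/s floored at 1.0 to keep the equations
    # well-behaved.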
    def test_wave_height_and_period(self):
        """CV: unit test for wave height and period equations."""
        from natcap.invest import coastal_vulnerability
        # Testing with some reasonable values
        U = 15.0  # wind (meters / second)
        F = 10000  # fetch distance (meters)
        D = -20.0  # fetch depth (meters)
        height = coastal_vulnerability.compute_wave_height(U, F, D)
        period = coastal_vulnerability.compute_wave_period(U, F, D)
        numpy.testing.assert_allclose(height, 1.0260516, rtol=0, atol=1e-6)
        numpy.testing.assert_allclose(period, 3.6288811, rtol=0, atol=1e-6)

        # Test with U < 1 and expect U to be forced to 1.0
        F = 10000  # fetch distance (meters)
        D = -20.0  # fetch depth (meters)
        heightA = coastal_vulnerability.compute_wave_height(0.5, F, D)
        heightB = coastal_vulnerability.compute_wave_height(1.0, F, D)
        periodA = coastal_vulnerability.compute_wave_period(0.0, F, D)
        periodB = coastal_vulnerability.compute_wave_period(1.0, F, D)
        numpy.testing.assert_allclose(heightA, heightB, rtol=0, atol=1e-6)
        numpy.testing.assert_allclose(periodA, periodB, rtol=0, atol=1e-6)

    def test_habitat_rank(self):
        """CV: regression test for habitat ranks."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        habitat_table_path = os.path.join(
            INPUT_DATA, 'natural_habitats_wcvi.csv')
        target_habitat_protection_path = os.path.join(
            workspace_dir, 'habitat_protection.csv')
        file_suffix = ''
        model_resolution = 5000

        task_graph = taskgraph.TaskGraph(
            os.path.join(workspace_dir, 'taskgraph_dir'), -1)

        task_list, pickle_list = coastal_vulnerability._schedule_habitat_tasks(
            base_shore_point_vector_path, habitat_table_path, model_resolution,
            workspace_dir, file_suffix, task_graph)

        coastal_vulnerability.calculate_habitat_rank(
            pickle_list, target_habitat_protection_path)

        expected_habitat_path = os.path.join(
            REGRESSION_DATA, 'expected_habitat_protection.csv')
        actual_values_df = pandas.read_csv(target_habitat_protection_path)
        expected_values_df = pandas.read_csv(expected_habitat_path)
        pandas.testing.assert_frame_equal(actual_values_df, expected_values_df)

    def test_invalid_habitats_outside_search_radius(self):
        """CV: test habitat search when no valid habitat within range."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        habitat_vector_path = os.path.join(
            workspace_dir, 'invalid_habitat.gpkg')
        _ = make_vector_of_invalid_geoms(habitat_vector_path)

        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        search_radius = 1  # meter; we don't want to find any habitat.
        habitat_rank = 1
        habitat_id = 'foo'
        target_habitat_pickle_path = os.path.join(
            workspace_dir, 'target.pickle')

        coastal_vulnerability.search_for_vector_habitat(
            base_shore_point_vector_path, search_radius, habitat_rank,
            habitat_id, habitat_vector_path, target_habitat_pickle_path)

        with open(target_habitat_pickle_path, 'rb') as file:
            result = pickle.load(file)
        # When no habitat is found near a point, it gets a protection rank of 5
        self.assertEqual(set(result.values()), {5})

    def test_geomorphology_rank(self):
        """CV: regression test for geomorphology values."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        geomorphology_vector_path = os.path.join(
            INPUT_DATA, 'geomorphology_few_ranks.shp')
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        target_pickle_path = os.path.join(
            workspace_dir, 'geomorphology.pickle')
        target_missing_data_path = os.path.join(
            workspace_dir, 'missing_data.gpkg')
        model_resolution = 5000

        coastal_vulnerability.calculate_geomorphology_exposure(
            geomorphology_vector_path, 3,
            base_shore_point_vector_path,
            model_resolution, target_pickle_path,
            target_missing_data_path)

        expected_values_path = os.path.join(
            REGRESSION_DATA, 'expected_geomorphology.json')
        assert_pickled_arrays_almost_equal(
            target_pickle_path, expected_values_path)

    def test_creation_of_missing_geomorphology_dataset(self):
        """CV: test vector is created when some points are missing geomorph."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        geomorphology_vector_path = os.path.join(
            INPUT_DATA, 'geomorphology_few_ranks.shp')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        simple_points_path = os.path.join(
            self.workspace_dir, 'simple_points.gpkg')
        # create a point vector very far from any geomorphology segments:
        point = [Point(0.0, 0.0)]
        fields = {'shore_id': ogr.OFTInteger}
        attrs = [{'shore_id': 0}]
        pygeoprocessing.shapely_geometry_to_vector(
            point, simple_points_path, projection_wkt, 'GPKG',
            fields=fields, attribute_list=attrs, ogr_geom_type=ogr.wkbPoint)
        target_pickle_path = os.path.join(
            workspace_dir, 'geomorphology.pickle')
        target_missing_data_path = os.path.join(
            workspace_dir, 'shore_points_missing_geomorphology.gpkg')
        model_resolution = 5  # makes for very small search radius

        coastal_vulnerability.calculate_geomorphology_exposure(
            geomorphology_vector_path, 3,
            simple_points_path,
            model_resolution, target_pickle_path,
            target_missing_data_path)

        expected_file_path = os.path.join(
            os.path.dirname(target_pickle_path),
            'shore_points_missing_geomorphology.gpkg')
        vector = gdal.OpenEx(expected_file_path, gdal.OF_VECTOR)
        layer = vector.GetLayer()
        n_features = layer.GetFeatureCount()
        layer = None
        vector = None
        self.assertEqual(len(point), n_features)

    def test_surge_exposure_rank(self):
        """CV: regression test for surge exposure values."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        shelf_contour_path = os.path.join(
            INPUT_DATA, 'continental_shelf_contour.gpkg')
        target_surge_pickle_path = os.path.join(
            workspace_dir, 'surge.pickle')
        coastal_vulnerability.calculate_surge_exposure(
            base_shore_point_vector_path, shelf_contour_path,
            target_surge_pickle_path)
        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_surge.json')

        assert_pickled_arrays_almost_equal(
            target_surge_pickle_path, expected_raw_values_path)

    def test_no_shelf_contour_near_aoi(self):
        """CV: test ValueError raised if shelf contour too far from shore."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        shore_info = pygeoprocessing.get_vector_info(
            base_shore_point_vector_path)
        bbox = shore_info['bounding_box']
        srs_wkt = shore_info['projection_wkt']

        shelf_contour_path = os.path.join(workspace_dir, 'surge.geojson')
        # Shelf line starts 2000 km from the bbox, beyond the 1500 km allowed.
        line_a = LineString([
            (bbox[0] - 2e6, bbox[1] - 2e6),
            (bbox[0] - 2e6 - 100, bbox[1] - 2e6 - 100)])
        pygeoprocessing.shapely_geometry_to_vector(
            [line_a], shelf_contour_path, srs_wkt, 'GeoJSON',
            ogr_geom_type=ogr.wkbLineString)

        target_surge_pickle_path = os.path.join(workspace_dir, 'surge.pickle')
        with self.assertRaises(ValueError) as cm:
            coastal_vulnerability.calculate_surge_exposure(
                base_shore_point_vector_path, shelf_contour_path,
                target_surge_pickle_path)
        actual_message = str(cm.exception)
        expected_message = 'No portion of the shelf contour line'
        self.assertIn(expected_message, actual_message)

    def test_surge_multilinestring_geometry(self):
        """CV: test surge calculation with multipart geometry input."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        aoi_vector_path = os.path.join(
            INPUT_DATA, 'AOI_BarkClay.shp')
        aoi_info = pygeoprocessing.get_vector_info(aoi_vector_path)
        bbox = aoi_info['bounding_box']
        srs_wkt = aoi_info['projection_wkt']

        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]
        geometries = [
            Point((bbox[0] + bbox[2]) / 2.0, (bbox[1] + bbox[3]) / 2.0)]
        # Make a shore point in center of AOI bbox
        shore_point_path = os.path.join(workspace_dir, 'shore_point.shp')
        pygeoprocessing.shapely_geometry_to_vector(
            geometries, shore_point_path, srs_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        # Make the surge line as the diagonal of the AOI bounding box,
        # and make it a multi-part geometry to test handling of that case.
        shelf_contour_path = os.path.join(workspace_dir, 'surge.geojson')
        line_a = LineString([(bbox[0], bbox[1]), (bbox[2], bbox[3])])
        line_b = LineString([(bbox[0], bbox[1]), (bbox[2], bbox[3])])
        geometries = [MultiLineString([line_a, line_b])]
        pygeoprocessing.shapely_geometry_to_vector(
            geometries, shelf_contour_path, srs_wkt, 'GeoJSON',
            ogr_geom_type=ogr.wkbMultiLineString)

        target_surge_pickle_path = os.path.join(
            workspace_dir, 'surge.pickle')

        coastal_vulnerability.calculate_surge_exposure(
            shore_point_path, shelf_contour_path,
            target_surge_pickle_path)

        # The shore point is at the center of the AOI bounding box and the
        # shelf contour is the diagonal of that box, so the expected
        # distance to the shelf contour is 0.0.
        with open(target_surge_pickle_path, 'rb') as file:
            surge_values = pickle.load(file)
        self.assertEqual(surge_values[0], 0.0)

    def test_relief_values(self):
        """CV: regression test for aggregated relief values."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        dem_path = os.path.join(
            INPUT_DATA, 'dem_wgs84.tif')
        target_relief_pickle_path = os.path.join(
            workspace_dir, 'relief.pickle')
        dem_averaging_radius = 20000.0
        model_resolution = 5000.0
        file_suffix = ''

        coastal_vulnerability.calculate_relief_exposure(
            base_shore_point_vector_path, dem_path, dem_averaging_radius,
            model_resolution, workspace_dir, file_suffix,
            target_relief_pickle_path)

        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_relief.json')
        assert_pickled_arrays_almost_equal(
            target_relief_pickle_path, expected_raw_values_path)

    def test_population_values(self):
        """CV: regression test for aggregated population density."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')
        population_path = os.path.join(
            INPUT_DATA, 'population.tif')
        target_population_pickle_path = os.path.join(
            workspace_dir, 'population.pickle')
        search_radius = 20000.0
        model_resolution = 5000.0
        file_suffix = ''

        coastal_vulnerability.aggregate_population_density(
            base_shore_point_vector_path, population_path, search_radius,
            model_resolution, workspace_dir, file_suffix,
            target_population_pickle_path)

        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_population.json')

        assert_pickled_arrays_almost_equal(
            target_population_pickle_path, expected_raw_values_path)

    def test_interpolate_slr(self):
        """CV: regression test for sea-level rise values.

        This tests an edge case where there is only one point in the
        SLR dataset, and it requires a coordinate transformation.
        """
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_shore_point_vector_path = os.path.join(
            INPUT_DATA, 'wwiii_shore_points_5000m.gpkg')

        # Make an SLR point vector
        slr_fieldname = 'Trend'
        slr_point_vector_path = os.path.join(
            workspace_dir, 'simple_points.shp')
        out_driver = ogr.GetDriverByName('ESRI Shapefile')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(4326)
        shapely_feature = Point(-125.65, 49.0)
        out_vector = out_driver.CreateDataSource(slr_point_vector_path)
        layer_name = os.path.basename(
            os.path.splitext(slr_point_vector_path)[0])
        out_layer = out_vector.CreateLayer(layer_name, srs=srs)
        field_defn = ogr.FieldDefn(slr_fieldname, ogr.OFTReal)
        out_layer.CreateField(field_defn)
        layer_defn = out_layer.GetLayerDefn()
        new_feature = ogr.Feature(layer_defn)
        new_geometry = ogr.CreateGeometryFromWkb(shapely_feature.wkb)
        new_feature.SetGeometry(new_geometry)
        new_feature.SetField(slr_fieldname, 1.3)
        out_layer.CreateFeature(new_feature)
        out_layer = None
        out_vector = None

        target_pickle_path = os.path.join(
            workspace_dir, 'slr.pickle')
        coastal_vulnerability.interpolate_sealevelrise_points(
            base_shore_point_vector_path, slr_point_vector_path,
            slr_fieldname, target_pickle_path)

        expected_raw_values_path = os.path.join(
            REGRESSION_DATA, 'expected_slr.json')

        assert_pickled_arrays_almost_equal(
            target_pickle_path, expected_raw_values_path)

    def test_interpolate_slr_beyond_maxdistance(self):
        """CV: test sea-level rise returns nan beyond max search distance."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        # Make a shore point
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        shore_point_path = os.path.join(workspace_dir, 'shore_point.shp')
        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]

        pygeoprocessing.shapely_geometry_to_vector(
            [Point(0., 0.)], shore_point_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        slr_point_vector_path = os.path.join(workspace_dir, 'slr_point.shp')
        slr_fieldname = 'Trend'
        shapely_point = Point(1e6, 1e6)  # very far from the shore point
        make_slr_vector(
            slr_point_vector_path, slr_fieldname, shapely_point, srs)

        target_pickle_path = os.path.join(
            workspace_dir, 'slr.pickle')
        coastal_vulnerability.interpolate_sealevelrise_points(
            shore_point_path, slr_point_vector_path,
            slr_fieldname, target_pickle_path)

        with open(target_pickle_path, 'rb') as file:
            actual_values = pickle.load(file)
        expected_values = numpy.array([numpy.nan])

        numpy.testing.assert_allclose(
            list(actual_values.values()), expected_values, rtol=0, atol=1e-4)

    def test_slr_missing_field(self):
        """CV: test KeyError raised if slr field is not present in vector."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        # Make a shore point
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        shore_point_path = os.path.join(workspace_dir, 'shore_point.shp')
        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]

        pygeoprocessing.shapely_geometry_to_vector(
            [Point(0., 0.)], shore_point_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        slr_point_vector_path = os.path.join(workspace_dir, 'slr_point.shp')
        slr_fieldname = 'Trend'
        shapely_point = Point(1., 1.)
        make_slr_vector(
            slr_point_vector_path, slr_fieldname, shapely_point, srs)
        nonexistent_field = 'foo'

        with self.assertRaises(KeyError) as cm:
            coastal_vulnerability.interpolate_sealevelrise_points(
                shore_point_path, slr_point_vector_path,
                nonexistent_field, 'target.pickle')
        actual_message = str(cm.exception)
        expected_message = 'fieldname %s not found in' % nonexistent_field
        self.assertIn(expected_message, actual_message)

    def test_aggregate_raster_edge_cases(self):
        """CV: test literal edge-cases in raster aggregation."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        raster_path = os.path.join(workspace_dir, 'simple_raster.tif')
        target_pickle_path = os.path.join(workspace_dir, 'target.pickle')

        # Make a simple raster
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        geotransform = [0, 0.5, 0.0, 0, 0.0, -0.5]
        n = 5
        nodata_val = -1
        gtiff_driver = gdal.GetDriverByName('GTiff')
        new_raster = gtiff_driver.Create(
            raster_path, n, n, 1, gdal.GDT_Int32, options=[
                'TILED=YES', 'BIGTIFF=YES', 'COMPRESS=LZW',
                'BLOCKXSIZE=16', 'BLOCKYSIZE=16'])
        new_raster.SetProjection(projection_wkt)
        new_raster.SetGeoTransform(geotransform)
        new_band = new_raster.GetRasterBand(1)
        array = numpy.array(range(n**2)).reshape((n, n))
        new_band.WriteArray(array)
        if nodata_val is not None:
            new_band.SetNoDataValue(nodata_val)
        new_raster.FlushCache()
        new_band = None
        new_raster = None

        search_radius = geotransform[1] * 3
        geometries = [
            Point(0.1, -0.1),    # pixel (0,0): kernel upper-left out of bounds
            Point(1.25, -1.25),  # pixel (2,2): kernel upper-left & lower-right O.O.B.
            Point(2.1, -2.1),    # pixel (4,4): kernel lower-right out of bounds
            Point(-10, 10),      # entire kernel out of bounds
        ]
        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}, {'shore_id': 1},
                      {'shore_id': 2}, {'shore_id': 3}]
        # Make a vector proximate to the raster
        simple_points_path = os.path.join(
            workspace_dir, 'simple_points.geojson')
        pygeoprocessing.shapely_geometry_to_vector(
            geometries, simple_points_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        coastal_vulnerability._aggregate_raster_values_in_radius(
            simple_points_path, raster_path, search_radius, self.workspace_dir,
            target_pickle_path, _mean_op)

        with open(target_pickle_path, 'rb') as file:
            actual_values = pickle.load(file)

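        # These expected values are consistent with a circular kernel of
        # radius 3 pixels (search_radius / pixel_size): around pixel (2, 2)
        # the kernel covers all 25 pixels, so mean(range(25)) is 12.0,
        # while at the corners it keeps the 11 in-bounds pixels whose
        # means are 72/11 (~6.5454) and 192/11 (~17.4545).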
        expected_values = numpy.array([6.5454, 12.0, 17.4545, numpy.nan])

        numpy.testing.assert_allclose(
            list(actual_values.values()), expected_values, rtol=0, atol=1e-4)

    def test_nodata_raster_aggregation(self):
        """CV: test raster aggregation over entirely nodata returns nan."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        raster_path = os.path.join(workspace_dir, 'nodata_raster.tif')
        target_pickle_path = os.path.join(workspace_dir, 'target.pickle')
        sample_distance = 1.5

        # Make a simple raster filled with all nodata
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        geotransform = [0, 0.5, 0.0, 0, 0.0, -0.5]
        n = 5
        nodata_val = -1
        gtiff_driver = gdal.GetDriverByName('GTiff')
        new_raster = gtiff_driver.Create(
            raster_path, n, n, 1, gdal.GDT_Int32, options=[
                'TILED=YES', 'BIGTIFF=YES', 'COMPRESS=LZW',
                'BLOCKXSIZE=16', 'BLOCKYSIZE=16'])
        new_raster.SetProjection(projection_wkt)
        new_raster.SetGeoTransform(geotransform)
        new_band = new_raster.GetRasterBand(1)
        array = numpy.array([nodata_val] * n**2).reshape((n, n))
        new_band.WriteArray(array)
        if nodata_val is not None:
            new_band.SetNoDataValue(nodata_val)
        new_raster.FlushCache()
        new_band = None
        new_raster = None

        # Make a vector proximate to the raster
        simple_points_path = os.path.join(workspace_dir, 'simple_points.shp')
        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]
        pygeoprocessing.shapely_geometry_to_vector(
            [Point(0., 0.)], simple_points_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        coastal_vulnerability._aggregate_raster_values_in_radius(
            simple_points_path, raster_path, sample_distance,
            self.workspace_dir, target_pickle_path, _mean_op)

        with open(target_pickle_path, 'rb') as file:
            actual_values = pickle.load(file)

        expected_values = numpy.array([numpy.nan])
        numpy.testing.assert_allclose(
            list(actual_values.values()), expected_values, rtol=0, atol=1e-4)

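    # The coordinates and per-field values in the next test are regression
    # values, presumably captured from a previously reviewed run of the
    # full model on this test data.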
    def test_complete_run(self):
        """CV: regression test for a complete run w/ all optional arguments."""
        from natcap.invest import coastal_vulnerability
        args = CoastalVulnerabilityTests.generate_base_args(self.workspace_dir)
        # these optional args aren't included in base_args:
        args['geomorphology_vector_path'] = os.path.join(
            INPUT_DATA, 'geomorphology_few_ranks.shp')
        args['geomorphology_fill_value'] = 3
        args['population_raster_path'] = os.path.join(
            INPUT_DATA, 'population.tif')
        args['population_radius'] = 16000
        args['slr_vector_path'] = os.path.join(
            INPUT_DATA, 'sea_level_rise.gpkg')
        args['slr_field'] = 'Trend'
        coastal_vulnerability.execute(args)

        results_vector = gdal.OpenEx(
            os.path.join(args['workspace_dir'], 'coastal_exposure.gpkg'),
            gdal.OF_VECTOR)
        results_layer = results_vector.GetLayer()

        # Shore points aren't necessarily created/numbered in the same order,
        # so we can't compare by the fid or shore_id fields.
        # Instead, check that each point has the expected attributes.
        fields = [
            'R_hab', 'R_wind', 'R_wave', 'R_surge', 'R_relief', 'R_geomorph',
            'R_slr', 'population', 'exposure', 'habitat_role',
            'exposure_no_habitats']

        # Sorted list of expected (x, y) point coords and their corresponding
        # expected values for the fields in the order above.
        sorted_expected_data = [
            [(252692.2222200262, 5482540.950248547),
             [5, 3, 2, 1, 4, 1, 5, 0, 2.49389836245, 0, 2.49389836245]],
            [(265195.9118871244, 5465342.359978485),
             [4.05, 5, 5, 1, 5, 1, 5, 0, 3.063308387, 0.0936167899, 3.1569251777]],
            [(268404.5296770451, 5479539.256472221),
             [5, 2, 2, 1, 1, 1, 5, 0, 1.9306977288, 0, 1.9306977288]],
            [(275802.584962168, 5469283.47691652),
             [5, 3, 1, 1, 5, 1, 4, 0, 2.2587827631, 0, 2.2587827631]],
            [(286971.1600469994, 5475571.525391179),
             [5, 1, 1, 3, 1, 1, 4, 0, 1.7948229213, 0, 1.7948229213]],
            [(290844.10673833836, 5460752.369983306),
             [5, 4, 2, 2, 2, 1, 4, 0, 2.5169979012, 0, 2.5169979012]],
            [(300130.0010361069, 5437435.4230010025),
             [1.7999999, 4, 5, 2, 5, 1, 3, 0, 2.712353253, 0.426215219066, 3.138568472]],
            [(306112.5574508078, 5450095.8865171345),
             [5, 1, 3, 3, 2, 1, 3, 0, 2.225039271, 0, 2.225039271]],
            [(318255.8192637532, 5423278.964182078),
             [5, 4, 3, 2, 3, 2.5, 1, 0, 2.6426195539, 0, 2.6426195539]],
            [(339388.60793905176, 5428895.077843302),
             [5, 3, 4, 4, 2, 4, 1, 0, 2.94471340036, 0, 2.94471340036]],
            [(344736.25795214524, 5411347.428706937),
             [5, 2, 4, 3, 3, 4, 1, 0, 2.8261463109, 0, 2.8261463109]],
            [(355807.04065901926, 5394736.771414153),
             [5, 5, 4, 4, 3, 4, 2, 0, 3.70591872429, 0, 3.70591872429]],
            [(361290.81087879254, 5427975.474574804),
             [5, 1, 3, 5, 1, 4, 2, 0, 2.493898362454, 0, 2.493898362454]],
            [(361341.1463245464, 5433678.995435326),
             [5, 2, 1, 5, 1, 4, 1, 0, 2.1316631165, 0, 2.1316631165]],
            [(368269.9048903524, 5449541.99543876),
             [5, 1, 1, 5, 2, 4, 2, 0.008, 2.3535468936, 0, 2.3535468936]],
            [(376479.9819538344, 5383636.197270025),
             [4.05, 5, 5, 4, 4, 4, 3, numpy.nan, 4.0989337064, 0.125266204816, 4.2241999112]],
        ]

        actual_data = []
        for feature in results_layer:
            geom = feature.GetGeometryRef()
            x = geom.GetX()
            y = geom.GetY()
            actual_row = []
            for field in fields:
                value = feature.GetField(field)
                if value is None:
                    actual_row.append(numpy.nan)
                else:
                    actual_row.append(value)
            actual_data.append([(x, y), actual_row])

        # The coords and corresponding field values should be close to expected
        sorted_actual_data = sorted(actual_data)
        for actual, expected in zip(sorted_actual_data, sorted_expected_data):
            actual_coords, actual_fields = actual[0], actual[1:]
            expected_coords, expected_fields = expected[0], expected[1:]
            numpy.testing.assert_allclose(actual_coords, expected_coords)
            numpy.testing.assert_allclose(actual_fields, expected_fields)

        # Also expect matching shore_id field in all tabular outputs:
        actual_values_df = pandas.read_csv(
            os.path.join(args['workspace_dir'], 'coastal_exposure.csv'))
        intermediate_df = pandas.read_csv(
            os.path.join(
                args['workspace_dir'],
                'intermediate/intermediate_exposure.csv'))
        habitat_df = pandas.read_csv(
            os.path.join(
                args['workspace_dir'],
                'intermediate/habitats/habitat_protection.csv'))
        pandas.testing.assert_series_equal(
            actual_values_df['shore_id'], intermediate_df['shore_id'])
        pandas.testing.assert_series_equal(
            actual_values_df['shore_id'], habitat_df['shore_id'])

    def test_final_risk_calc(self):
        """CV: regression test for the final risk score calculation."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        target_point_vector_path = os.path.join(
            workspace_dir, 'coastal_exposure.gpkg')
        target_point_csv_path = os.path.join(
            workspace_dir, 'coastal_exposure.csv')

        # Points with ranks for the final equation. Also includes a field
        # without the R_ prefix, which the final equation should ignore.
        base_vector_path = os.path.join(
            REGRESSION_DATA, 'coastal_exposure.gpkg')

        # This input gets modified in place, so first copy to working dir
        base_shore_point_vector = gdal.OpenEx(base_vector_path, gdal.OF_VECTOR)
        gdal.VectorTranslate(target_point_vector_path, base_shore_point_vector)

        coastal_vulnerability.calculate_final_risk(
            target_point_vector_path, target_point_csv_path)

        actual_values_df = pandas.read_csv(target_point_csv_path)
        expected_values_df = pandas.read_csv(
            os.path.join(REGRESSION_DATA, 'expected_final_risk.csv'))
        pandas.testing.assert_frame_equal(
            actual_values_df, expected_values_df, check_dtype=False)

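    # A hypothetical sketch of the behavior checked next: a geometric mean
    # computed as numpy.exp(numpy.mean(numpy.log(ranks))) propagates nan,
    # so one missing rank (None cast to nan) should make the result nan.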
    def test_geometric_mean_with_nan(self):
        """CV: test geometric mean function returns `nan` with missing data."""
        from natcap.invest import coastal_vulnerability
        array = numpy.array([1, 1, 1, 1, None], dtype=float)
        result = coastal_vulnerability._geometric_mean(array)
        self.assertTrue(numpy.isnan(result))

    def test_final_risk_calc_with_missing_data(self):
        """CV: test final risk calculation w/ missing data.

        Test that missing data at a feature propagates to an empty field
        in the output.
        """
        from natcap.invest import coastal_vulnerability
        target_vector_path = os.path.join(self.workspace_dir, 'target.gpkg')
        target_csv_path = os.path.join(self.workspace_dir, 'target.csv')

        # This gpkg has a feature with an empty field value for 'R_slr'.
        # The function modifies the file in place, so copy to the test
        # workspace first.
        base_vector_path = os.path.join(
            REGRESSION_DATA, 'test_missing_values.gpkg')
        gdal.VectorTranslate(target_vector_path, base_vector_path)

        coastal_vulnerability.calculate_final_risk(
            target_vector_path, target_csv_path)
        actual_values_df = pandas.read_csv(target_csv_path)

        # These fields should have missing values after the final calculations
        na_cols = ['exposure', 'habitat_role', 'exposure_no_habitats']
        na_data = [numpy.nan] * 3
        expected_df = pandas.DataFrame([na_data], columns=na_cols)

        pandas.testing.assert_frame_equal(
            actual_values_df[na_cols], expected_df)

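    # _bin_values_to_percentiles presumably splits the non-missing values
    # at the 20th/40th/60th/80th percentiles to assign ranks 1-5, leaving
    # nan entries as nan; the next test checks both behaviors.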
    def test_binning_with_missing_data(self):
        """CV: test binning continuous values to ranks, w/ missing values."""
        from natcap.invest import coastal_vulnerability
        n = 50
        mask = [10, 20, 30, 40]
        keys = range(n)
        numpy.random.seed(0)
        values = numpy.random.uniform(0, 1, n)
        values[mask] = numpy.nan
        missing_values_dict = dict(zip(keys, values))

        ranks_dict = coastal_vulnerability._bin_values_to_percentiles(
            missing_values_dict)

        # with random uniform values, all 5 ranks should be present
        expected_ranks = set([1, 2, 3, 4, 5])
        self.assertTrue(expected_ranks.issubset(ranks_dict.values()))
        # and the masked indices should be nans
        self.assertTrue(
            all(numpy.isnan(numpy.array(list(ranks_dict.values()))[mask])))

    def test_calc_habitat_rank_with_missing_values(self):
        """CV: test calc Rhab when a habitat has a missing value."""
        from natcap.invest import coastal_vulnerability
        rank = coastal_vulnerability._calc_Rhab([3, 3, 3, numpy.nan])
        self.assertTrue(numpy.isnan(rank))

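    # The two tests below pin down the zero_negative_values contract: with
    # a defined nodata value, nodata pixels pass through unchanged; with
    # nodata undefined, every negative value is clamped to zero. A minimal
    # sketch of the defined-nodata case, assuming a pure-numpy
    # implementation: numpy.where((array < 0) & (array != nodata), 0, array).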
    def test_positive_dem_with_nodata_floats(self):
        """CV: test coercing negative DEM values to zero.

        More specifically, test the edge case where the input values are
        floats and very large/small values that overflow an Int16.
        """
        from natcap.invest import coastal_vulnerability
        n = 5
        nodata_val = -99999

        array = numpy.array(
            [nodata_val] * n**2, dtype=numpy.float32).reshape((n, n))

        pos_array = coastal_vulnerability.zero_negative_values(
            array, nodata_val)
        # It's all nodata going in, so should be all same nodata out.
        numpy.testing.assert_array_equal(array, pos_array)

    def test_dem_undefined_nodata(self):
        """CV: test coercing negative DEM values to zero, undefined nodata."""
        from natcap.invest import coastal_vulnerability
        n = 5
        nodata_val = -99999

        array = numpy.array(
            [nodata_val] * n**2, dtype=numpy.float32).reshape((n, n))

        pos_array = coastal_vulnerability.zero_negative_values(
            array, None)
        # It's all negative going in, so should be all 0s out.
        numpy.testing.assert_array_equal(numpy.zeros_like(array), pos_array)

    def test_shore_points_on_single_polygon(self):
        """CV: test shore point creation with single polygon landmass."""
        from natcap.invest import coastal_vulnerability
        aoi_path = os.path.join(self.workspace_dir, 'aoi.geojson')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()

        aoi_geometries = [Polygon([
            (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])]
        pygeoprocessing.shapely_geometry_to_vector(
            aoi_geometries, aoi_path, wkt, 'GeoJSON')

        landmass_path = os.path.join(
            self.workspace_dir, 'landmass.geojson')

        landmass_geometries = [Polygon([
            (-100, -100), (100, -100), (100, 100), (-100, 100), (-100, -100)])]
        pygeoprocessing.shapely_geometry_to_vector(
            landmass_geometries, landmass_path, wkt, 'GeoJSON')

        model_resolution = 100

        polygon_pickle = os.path.join(
            self.workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            self.workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            self.workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            self.workspace_dir, 'shore_points.gpkg')

        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_path, model_resolution, target_vector_path,
            polygon_pickle, lines_pickle, lines_rtree)

        vector = gdal.OpenEx(
            target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
        layer = vector.GetLayer()
        n_points = layer.GetFeatureCount()
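        # The 200 m x 200 m landmass has an 800 m coastline, so a 100 m
        # model resolution should yield 8 evenly spaced shore points.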
        self.assertEqual(n_points, 8)

    def test_shore_points_on_multi_polygon(self):
        """CV: test shore point creation with multipolygon landmass."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        aoi_path = os.path.join(workspace_dir, 'aoi.geojson')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()

        aoi_geometries = [Polygon([
            (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])]
        pygeoprocessing.shapely_geometry_to_vector(
            aoi_geometries, aoi_path, wkt, 'GeoJSON')

        landmass_path = os.path.join(workspace_dir, 'landmass.geojson')
        poly_a = Polygon([
            (-200, -200), (-100, -200), (-100, -100), (-200, -100),
            (-200, -200)])
        poly_b = Polygon([
            (100, 100), (200, 100), (200, 200), (100, 200), (100, 100)])
        landmass_geometries = [poly_a, poly_b, MultiPolygon([poly_a, poly_b])]
        pygeoprocessing.shapely_geometry_to_vector(
            landmass_geometries, landmass_path, wkt, 'GeoJSON',
            ogr_geom_type=ogr.wkbUnknown)

        model_resolution = 100

        polygon_pickle = os.path.join(
            workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            workspace_dir, 'shore_points.gpkg')

        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_path, model_resolution, target_vector_path,
            polygon_pickle, lines_pickle, lines_rtree)

        vector = gdal.OpenEx(
            target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
        layer = vector.GetLayer()
        n_points = layer.GetFeatureCount()
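        # After unioning, the landmass is presumably just the two disjoint
        # 100 m x 100 m squares (the duplicate MultiPolygon feature should
        # not double-count), so 800 m of coastline at a 100 m resolution
        # again yields 8 points.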
        self.assertEqual(n_points, 8)

    def test_zero_shorepoints_created(self):
        """CV: test RuntimeError raised if zero shore points are created."""
        from natcap.invest import coastal_vulnerability
        aoi_path = os.path.join(self.workspace_dir, 'aoi.geojson')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()
        a = 100

        aoi_geometries = [Polygon([
            (-a, -a), (a, -a), (a, a), (-a, a), (-a, -a)])]
        pygeoprocessing.shapely_geometry_to_vector(
            aoi_geometries, aoi_path, wkt, 'GeoJSON')

        landmass_path = os.path.join(self.workspace_dir, 'landmass.geojson')
        # This landmass fully encloses the AOI, but has no edges within the
        # AOI. Such a landmass would pass validation's spatial overlap check.
        b = a * 2

        landmass_geometries = [Polygon([
            (-b, -b), (b, -b), (b, b), (-b, b), (-b, -b)])]
        pygeoprocessing.shapely_geometry_to_vector(
            landmass_geometries, landmass_path, wkt, 'GeoJSON')

        model_resolution = 5

        polygon_pickle = os.path.join(
            self.workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            self.workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            self.workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            self.workspace_dir, 'shore_points.gpkg')

        with self.assertRaises(RuntimeError):
            coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
                aoi_path, landmass_path, model_resolution, target_vector_path,
                polygon_pickle, lines_pickle, lines_rtree)

    def test_aoi_multiple_features(self):
        """CV: test shore point creation in AOI with multiple features."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        aoi_path = os.path.join(workspace_dir, 'aoi.geojson')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()
        # These two disjoint AOI polygons intersect the same landmass line
        # segment. This tests an edge case where a MultiLineString
        # geometry is created when landmass lines are clipped by the AOI.
        poly_a = Polygon([
            (-200, -200), (-100, -200), (-100, -100), (-200, -100),
            (-200, -200)])
        poly_b = Polygon([
            (100, -200), (200, -200), (200, -100), (100, -100),
            (100, -200)])

        pygeoprocessing.shapely_geometry_to_vector(
            [poly_a, poly_b], aoi_path, wkt, 'GeoJSON')

        landmass_path = os.path.join(workspace_dir, 'landmass.geojson')

        landmass_geometries = [Polygon([
            (-190, -190), (190, -190), (190, 190), (-190, 190), (-190, -190)])]
        pygeoprocessing.shapely_geometry_to_vector(
            landmass_geometries, landmass_path, wkt, 'GeoJSON')

        model_resolution = 80

        polygon_pickle = os.path.join(
            workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            workspace_dir, 'shore_points.gpkg')

        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_path, model_resolution, target_vector_path,
            polygon_pickle, lines_pickle, lines_rtree)

        vector = gdal.OpenEx(
            target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
        layer = vector.GetLayer()
        n_points = layer.GetFeatureCount()
        self.assertEqual(n_points, 6)

    def test_landmass_interior_holes(self):
        """CV: test no shore points created on interior holes."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        aoi_path = os.path.join(workspace_dir, 'aoi.geojson')
        landmass_path = os.path.join(workspace_dir, 'landmass.geojson')
        polygon_pickle = os.path.join(
            workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            workspace_dir, 'shore_points.gpkg')
        model_resolution = 100
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()

        aoi_poly = Polygon([
            (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])
        pygeoprocessing.shapely_geometry_to_vector(
            [aoi_poly], aoi_path, wkt, 'GeoJSON')

        def count_shore_points(landmass_geometries):
            pygeoprocessing.shapely_geometry_to_vector(
                [landmass_geometries], landmass_path, wkt, 'GeoJSON')

            coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
                aoi_path, landmass_path, model_resolution, target_vector_path,
                polygon_pickle, lines_pickle, lines_rtree)

            vector = gdal.OpenEx(
                target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
            layer = vector.GetLayer()
            return layer.GetFeatureCount()

        # Count points created by a landmass with no holes; this will
        # be the expected count for the landmass with holes.
        exterior = [
            (-190, -190), (190, -190), (190, 190), (-190, 190), (-190, -190)]
        landmass_poly_no_holes = Polygon(exterior)
        n_points_expected = count_shore_points(landmass_poly_no_holes)

        interior = [
            (-100, -100), (100, -100), (100, 100), (-100, 100), (-100, -100)]
        landmass_poly_holes = Polygon(exterior, [interior])
        n_points_actual = count_shore_points(landmass_poly_holes)

        self.assertEqual(n_points_expected, n_points_actual)

    def test_landmass_with_redundant_vertices(self):
        """CV: test landmass processing with redundant landmass vertices."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir

        aoi_path = os.path.join(workspace_dir, 'aoi.geojson')
        landmass_path = os.path.join(workspace_dir, 'landmass.geojson')
        polygon_pickle = os.path.join(
            workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            workspace_dir, 'shore_points.gpkg')
        model_resolution = 1
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(32634)  # UTM Zone 34N
        wkt = srs.ExportToWkt()

        # Nearly identical vertices are from real data that raised an error
        landmass_coords = [
            (420274.0, 6042938.0),
            (420275.7984616568, 6042939.285698998),  # nearly identical
            (420275.7984616570, 6042939.285698998),  # nearly identical
            (420276.0, 6042938.0),
            (420274.0, 6042938.0)]
        landmass_geometry = Polygon(landmass_coords)
        aoi_poly = Point(420274.0, 6042938.0).buffer(10)  # contains landmass
        pygeoprocessing.shapely_geometry_to_vector(
            [aoi_poly], aoi_path, wkt, 'GeoJSON')
        pygeoprocessing.shapely_geometry_to_vector(
            [landmass_geometry], landmass_path, wkt, 'GeoJSON')

        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_path, model_resolution, target_vector_path,
            polygon_pickle, lines_pickle, lines_rtree)

        # If we exclude the landmass segment with identical start and end
        # points, we should end up with this many segments in the prepared
        # landmass.
        with open(lines_pickle, 'rb') as pickle_file:
            lines = pickle.load(pickle_file)
        self.assertEqual(len(lines), 3)

    def test_aoi_invalid_geometry(self):
        """CV: test shore point creation with invalid geom in AOI."""
        from natcap.invest import coastal_vulnerability
        aoi_path = os.path.join(self.workspace_dir, 'aoi.gpkg')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()
        _ = make_vector_of_invalid_geoms(aoi_path)

        # In this test, it won't matter if the landmass intersects the AOI.
        landmass_path = os.path.join(
            self.workspace_dir, 'landmass.geojson')
        pygeoprocessing.shapely_geometry_to_vector(
            [Polygon([(-100, -100), (100, -100), (100, 100),
                      (-100, 100), (-100, -100)])],
            landmass_path, wkt, 'GeoJSON')

        model_resolution = 100

        polygon_pickle = os.path.join(
            self.workspace_dir, 'polygon.pickle')
        lines_pickle = os.path.join(
            self.workspace_dir, 'lines.pickle')
        lines_rtree = os.path.join(
            self.workspace_dir, 'rtree.dat')
        target_vector_path = os.path.join(
            self.workspace_dir, 'shore_points.gpkg')

        # Getting to the RuntimeError for zero shore points means
        # all the invalid geometries in the AOI were handled.
        with self.assertRaises(RuntimeError):
            coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
                aoi_path, landmass_path, model_resolution, target_vector_path,
                polygon_pickle, lines_pickle, lines_rtree)

    def test_prepare_landmass_invalid_geometry(self):
        """CV: test handling invalid geometries in landmass vector."""
        from natcap.invest import coastal_vulnerability
        aoi_path = os.path.join(self.workspace_dir, 'aoi.geojson')
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        wkt = srs.ExportToWkt()

        aoi_geometries = [Polygon([
            (-200, -200), (200, -200), (200, 200), (-200, 200), (-200, -200)])]
        pygeoprocessing.shapely_geometry_to_vector(
            aoi_geometries, aoi_path, wkt, 'GeoJSON')

        landmass_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg')
        n_features = make_vector_of_invalid_geoms(landmass_vector_path)

        target_polygon_pickle_path = os.path.join(
            self.workspace_dir, 'polygon.pickle')
        target_lines_pickle_path = os.path.join(
            self.workspace_dir, 'lines.pickle')
        target_rtree_path = os.path.join(self.workspace_dir, 'rtree.dat')
        # Create rtree files to exercise the function's logic of removing
        # pre-existing files
        target_rtree_path_base = os.path.splitext(target_rtree_path)[0]
        open(target_rtree_path, 'a').close()
        open(target_rtree_path_base + '.idx', 'a').close()

        model_resolution = 100
        target_vector_path = os.path.join(
            self.workspace_dir, 'temp-shore-pts.gpkg')
        coastal_vulnerability.prepare_landmass_line_index_and_interpolate_shore_points(
            aoi_path, landmass_vector_path, model_resolution,
            target_vector_path, target_polygon_pickle_path,
            target_lines_pickle_path, target_rtree_path)

        with open(target_polygon_pickle_path, 'rb') as polygon_file:
            shapely_geom = pickle.load(polygon_file)

        # Expect 1 input geometry to be skipped, and the rest to be in
        # shapely_geom_list.
        self.assertEqual(len(shapely_geom.geoms), n_features - 1)

    def test_no_wwiii_coverage(self):
        """CV: test exception when shore points are outside max wwiii dist."""
        from natcap.invest import coastal_vulnerability
        args = CoastalVulnerabilityTests.generate_base_args(self.workspace_dir)
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()

        simple_points_path = os.path.join(
            self.workspace_dir, 'simple_points.shp')

        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]

        pygeoprocessing.shapely_geometry_to_vector(
            [Point(0.0, 0.0)], simple_points_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        target_path = os.path.join(self.workspace_dir, 'target.gpkg')
        with self.assertRaises(ValueError):
            coastal_vulnerability.interpolate_wwiii_to_shore(
                simple_points_path, args['wwiii_vector_path'],
                target_path)

    def test_projected_wwiii_input(self):
        """CV: test wwiii interpolation with a projected wwiii input vector."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        args = CoastalVulnerabilityTests.generate_base_args(workspace_dir)
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()

        projected_wwiii_vector_path = os.path.join(
            workspace_dir, 'prj_wwiii.shp')
        pygeoprocessing.reproject_vector(
            args['wwiii_vector_path'], srs.ExportToWkt(),
            projected_wwiii_vector_path)

        bbox = pygeoprocessing.get_vector_info(
            projected_wwiii_vector_path)['bounding_box']
        simple_points_path = os.path.join(workspace_dir, 'simple_points.shp')

        geometries = [Point(
            (bbox[0] + bbox[2]) / 2.0, (bbox[1] + bbox[3]) / 2.0)]
        fields = {'shore_id': ogr.OFTInteger}
        attributes = [{'shore_id': 0}]

        pygeoprocessing.shapely_geometry_to_vector(
            geometries, simple_points_path, projection_wkt, 'GeoJSON',
            fields=fields, attribute_list=attributes,
            ogr_geom_type=ogr.wkbPoint)

        target_path = os.path.join(workspace_dir, 'target.gpkg')
        coastal_vulnerability.interpolate_wwiii_to_shore(
            simple_points_path, projected_wwiii_vector_path,
            target_path)

        vector = gdal.OpenEx(target_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
        layer = vector.GetLayer()
        layer_defn = layer.GetLayerDefn()
        n_fields = layer_defn.GetFieldCount()
        self.assertEqual(n_fields, 126)

        # Just spot-checking one field here. Overall correctness is verified
        # by other regression tests.
        feature = layer.GetFeature(1)  # gpkg FID index = 1
        value = feature.GetField('REI_PCT90')
        self.assertTrue(numpy.isclose(value, 0.0146246))

        feature = None
        layer_defn = None
        layer = None
        vector = None

    def test_clip_project_already_projected_raster(self):
        """CV: test clip_and_project_raster on an already projected raster."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_raster_path = os.path.join(workspace_dir, 'nodata_raster.tif')
        target_raster_path = os.path.join(workspace_dir, 'target.tiff')

        # Make a simple raster in a projected SRS
        srs = osr.SpatialReference()
        srs.ImportFromEPSG(26910)  # UTM Zone 10N
        projection_wkt = srs.ExportToWkt()
        geotransform = [0, 0.5, 0.0, 0, 0.0, -0.5]
        n = 5
        nodata_val = -1
        gtiff_driver = gdal.GetDriverByName('GTiff')
        new_raster = gtiff_driver.Create(
            base_raster_path, n, n, 1, gdal.GDT_Int32, options=[
                'TILED=YES', 'BIGTIFF=YES', 'COMPRESS=LZW',
                'BLOCKXSIZE=16', 'BLOCKYSIZE=16'])
        new_raster.SetProjection(projection_wkt)
        new_raster.SetGeoTransform(geotransform)
        new_band = new_raster.GetRasterBand(1)
        array = numpy.array([nodata_val] * n**2).reshape((n, n))
        new_band.WriteArray(array)
        if nodata_val is not None:
            new_band.SetNoDataValue(nodata_val)
        new_raster.FlushCache()
        new_band = None
        new_raster = None

        raster_info = pygeoprocessing.get_raster_info(base_raster_path)
        clipping_box = raster_info['bounding_box']
        target_srs_wkt = raster_info['projection_wkt']
        model_resolution = 9  # expect this to be ignored
        file_suffix = ''

        coastal_vulnerability.clip_and_project_raster(
            base_raster_path, clipping_box, target_srs_wkt, model_resolution,
            workspace_dir, file_suffix, target_raster_path)

        # If the input was unprojected, the output pixel size would be the
        # model_resolution. Instead, here we expect pixel_size to remain the
        # same as the original base raster.
        actual_pixel_size = pygeoprocessing.get_raster_info(
            target_raster_path)['pixel_size']
        self.assertTrue(
            actual_pixel_size == (geotransform[1], geotransform[5]))

    def test_clip_project_vector_on_invalid_geometry(self):
        """CV: test clip and project vector on input with invalid geometry."""
        from natcap.invest import coastal_vulnerability
        workspace_dir = self.workspace_dir
        base_vector_path = os.path.join(workspace_dir, 'invalid.gpkg')
        n_features = make_vector_of_invalid_geoms(base_vector_path)
        vector_info = pygeoprocessing.get_vector_info(base_vector_path)
        clipping_box = vector_info['bounding_box']
        target_srs_wkt = vector_info['projection_wkt']
        tmp_vector_path = os.path.join(workspace_dir, 'tmp_clipped.gpkg')
        target_vector_path = os.path.join(
            workspace_dir, 'clipped_projected.gpkg')

        coastal_vulnerability.clip_and_project_vector(
            base_vector_path, clipping_box, target_srs_wkt,
            tmp_vector_path, target_vector_path)

        # One of the invalid geometries cannot be loaded by shapely
        # and will be skipped, so expect the target to have one fewer
        # feature than the base.
        target_vector = gdal.OpenEx(
            target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
        target_layer = target_vector.GetLayer()
        n_actual_features = target_layer.GetFeatureCount()
        self.assertTrue(n_actual_features == n_features - 1)

    def test_polygon_to_lines(self):
        """CV: test a helper function that converts polygons to linestrings."""
        from natcap.invest import coastal_vulnerability
        # Test a polygon with inner rings to cover all paths through the
        # function. A sketch after this class illustrates the segment count.
        donut_wkt = ('POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), '
                     '(20 30, 35 35, 30 20, 20 30))')
        geometry = ogr.CreateGeometryFromWkt(donut_wkt)
        shapely_geom = shapely.wkb.loads(bytes(geometry.ExportToWkb()))
        line_list = coastal_vulnerability.polygon_to_lines(shapely_geom)
        # Polygon has 4 sides on its exterior ring and 3 on its interior
        # ring, so expect 7 lines.
        self.assertTrue(len(line_list) == 7)
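

# A minimal sketch, not used by the tests above, of why the donut polygon
# in test_polygon_to_lines yields 7 lines: a closed ring repeats its first
# vertex, so a ring with N unique vertices contributes N segments. The
# helper name _count_ring_segments is hypothetical, for illustration only.
def _count_ring_segments(shapely_polygon):
    """Count the line segments across all rings of a shapely polygon."""
    rings = [shapely_polygon.exterior] + list(shapely_polygon.interiors)
    # len(ring.coords) includes the repeated closing vertex, so each
    # closed ring contains len(coords) - 1 segments.
    return sum(len(ring.coords) - 1 for ring in rings)
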
def assert_pickled_arrays_almost_equal(
        actual_values_pickle_path, expected_values_json_path):
    """Open a pickled dict and assert keys and values match expected.

    Expected data is stored in JSON because pickles are only reliably
    read by the same program that wrote them.
    """
    with open(actual_values_pickle_path, 'rb') as pickle_file:
        actual_values_dict = pickle.load(pickle_file)
    actual_values = list(actual_values_dict.values())
    actual_fids = list(actual_values_dict.keys())

    with open(expected_values_json_path, 'r') as json_file:
        expected_values_dict = json.load(json_file)
    expected_values_dict = {
        int(x[0]): float(x[1]) for x in expected_values_dict.items()}
    # The dict items need sorting by FID to match the pre-sorted
    # pickled items.
    expected_fids = [x[0] for x in sorted(expected_values_dict.items())]
    expected_values = [x[1] for x in sorted(expected_values_dict.items())]

    numpy.testing.assert_allclose(
        actual_values, expected_values, rtol=0, atol=1e-2)
    numpy.testing.assert_array_equal(actual_fids, expected_fids)
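

# A minimal sketch, assuming the pickled object is a {fid: value} dict like
# the one read above, of how the expected-values JSON could be regenerated.
# The helper name _pickle_to_expected_json is hypothetical and is not
# called by any test.
def _pickle_to_expected_json(pickle_path, json_path):
    """Dump a pickled {fid: value} dict to a portable JSON file."""
    with open(pickle_path, 'rb') as pickle_file:
        values_dict = pickle.load(pickle_file)
    with open(json_path, 'w') as json_file:
        # JSON object keys must be strings; cast values to plain floats
        # so the file does not depend on numpy scalar types.
        json.dump(
            {str(fid): float(value) for fid, value in values_dict.items()},
            json_file)
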
class CoastalVulnerabilityValidationTests(unittest.TestCase):
    """Tests for the CV Model MODEL_SPEC and validation."""

    def setUp(self):
        """Create a temporary workspace."""
        self.workspace_dir = tempfile.mkdtemp()
        self.base_required_keys = [
            'workspace_dir',
            'aoi_vector_path',
            'model_resolution',
            'landmass_vector_path',
            'wwiii_vector_path',
            'max_fetch_distance',
            'bathymetry_raster_path',
            'shelf_contour_vector_path',
            'dem_path',
            'dem_averaging_radius',
            'habitat_table_path',
        ]

    def tearDown(self):
        """Remove the temporary workspace after a test."""
        shutil.rmtree(self.workspace_dir)

    def test_missing_keys(self):
        """CV Validate: assert missing required keys."""
        from natcap.invest import coastal_vulnerability
        from natcap.invest import validation

        # empty args dict.
        validation_errors = coastal_vulnerability.validate({})
        invalid_keys = validation.get_invalid_keys(validation_errors)
        expected_missing_keys = set(self.base_required_keys)
        self.assertEqual(invalid_keys, expected_missing_keys)

    def test_missing_keys_geomorphology(self):
        """CV Validate: assert missing geomorphology keys."""
        from natcap.invest import coastal_vulnerability
        from natcap.invest import validation

        args = {'geomorphology_vector_path': 'foo.shp'}
        validation_errors = coastal_vulnerability.validate(args)
        invalid_keys = validation.get_invalid_keys(validation_errors)
        expected_missing_keys = set(
            self.base_required_keys +
            ['geomorphology_fill_value',
             'geomorphology_vector_path'])
        self.assertEqual(invalid_keys, expected_missing_keys)

    def test_missing_keys_population(self):
        """CV Validate: assert missing population keys."""
        from natcap.invest import coastal_vulnerability
        from natcap.invest import validation

        args = {'population_raster_path': 'foo.tif'}
        validation_errors = coastal_vulnerability.validate(args)
        invalid_keys = validation.get_invalid_keys(validation_errors)
        expected_missing_keys = set(
            self.base_required_keys +
            ['population_raster_path',
             'population_radius'])
        self.assertEqual(invalid_keys, expected_missing_keys)

    def test_missing_keys_sealevelrise(self):
        """CV Validate: assert missing sealevelrise keys."""
        from natcap.invest import coastal_vulnerability
        from natcap.invest import validation

        args = {'slr_vector_path': 'foo.shp'}
        validation_errors = coastal_vulnerability.validate(args)
        invalid_keys = validation.get_invalid_keys(validation_errors)
        expected_missing_keys = set(
            self.base_required_keys +
            ['slr_vector_path',
             'slr_field'])
        self.assertEqual(invalid_keys, expected_missing_keys)

    def test_missing_sealevelrise_field(self):
        """CV Validate: test catching SLR field not present in vector."""
        from natcap.invest import coastal_vulnerability
        from natcap.invest import validation
        slr_vector_path = os.path.join(
            INPUT_DATA, 'sea_level_rise.gpkg')
        err_list = coastal_vulnerability.validate(
            {'slr_vector_path': slr_vector_path,
             'slr_field': 'foo'})
        # sea_level_rise.gpkg has one field called 'Trend'
        expected_err = (
            ['slr_field'],
            validation.MESSAGES['INVALID_OPTION'].format(option_list=['Trend']))
        self.assertTrue(expected_err in err_list)


def make_slr_vector(slr_point_vector_path, fieldname, shapely_feature, srs):
    """Create an SLR vector with a single point feature.

    Args:
        slr_point_vector_path (string): path to the target vector
        fieldname (string): name of a field to be created in target vector
        shapely_feature (Point): shapely Point object
        srs (osr.SpatialReference): spatial reference for the target layer

    Returns:
        None

    """
    driver = ogr.GetDriverByName('ESRI Shapefile')
    out_vector = driver.CreateDataSource(slr_point_vector_path)
    layer_name = os.path.basename(os.path.splitext(slr_point_vector_path)[0])
    out_layer = out_vector.CreateLayer(layer_name, srs=srs)
    field_defn = ogr.FieldDefn(fieldname, ogr.OFTReal)
    out_layer.CreateField(field_defn)
    layer_defn = out_layer.GetLayerDefn()
    new_feature = ogr.Feature(layer_defn)
    new_geometry = ogr.CreateGeometryFromWkb(shapely_feature.wkb)
    new_feature.SetGeometry(new_geometry)
    new_feature.SetField(fieldname, 1.3)  # any value will do
    out_layer.CreateFeature(new_feature)
    out_layer = None
    out_vector = None
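

# A brief usage sketch for make_slr_vector; the filename, field name,
# coordinates, and EPSG code here are arbitrary, chosen for illustration
# only. The helper _example_slr_vector is hypothetical and is not called
# by any test.
def _example_slr_vector(workspace_dir):
    """Write a one-point SLR shapefile with a 'Trend' field."""
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(26910)  # UTM Zone 10N
    vector_path = os.path.join(workspace_dir, 'slr.shp')
    make_slr_vector(vector_path, 'Trend', Point(500000.0, 5000000.0), srs)
    return vector_path
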
def make_vector_of_invalid_geoms(target_vector_path):
    """Make features out of various invalid geometries.

    Most example geometries come from: https://github.com/tudelft3d/prepair
    Three of the geometries below can be fixed with a 0-width buffer.
    One geometry below cannot be loaded by shapely or fixed by a buffer.
    I don't have an example that CAN be loaded by shapely but CANNOT be
    fixed by the buffer.

    These geometries are non-overlapping because some of the CV functions
    tested with them do a cascaded union. Isolating the geometries makes
    it possible to assert that the number of geometries going in equals
    the number of features coming out, regardless of that union.

    Returns:
        Integer representing the number of features created

    """
    # 1: Bowtie polygon - fixed by buffer
    invalid_bowtie_polygon = ogr.CreateGeometryFromWkt(
        'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')
    assert not invalid_bowtie_polygon.IsValid()

    # 2: Inner ring with one edge sharing part of an edge of the outer ring
    # - fixed by buffer
    invalid_shared_edge_polygon = ogr.CreateGeometryFromWkt(
        'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0),(5 2,5 7,10 7, 10 2, 5 2))')
    assert not invalid_shared_edge_polygon.IsValid()

    # 3: Dangling edge - fixed by buffer
    dangle_geom = ('POLYGON((100 100, 110 100, 115 105, 110 100, 110 110, '
                   '100 110, 100 100))')
    invalid_dangling_edge_polygon = ogr.CreateGeometryFromWkt(dangle_geom)
    assert not invalid_dangling_edge_polygon.IsValid()

    # 4: Open ring polygon - one invalid geometry that cannot be loaded by
    # shapely or fixed by a buffer. We expect this polygon to be skipped
    # by the CV functions being tested.
    invalid_open_ring_polygon = ogr.CreateGeometryFromWkt(
        'POLYGON ((2 -2, 6 -2, 6 -6, 2 -6))')
    assert not invalid_open_ring_polygon.IsValid()

    gpkg_driver = gdal.GetDriverByName('GPKG')
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(32731)  # WGS 84 / UTM zone 31S
    target_vector = gpkg_driver.Create(
        target_vector_path, 0, 0, 0, gdal.GDT_Unknown)
    target_layer = target_vector.CreateLayer(
        'target_layer', srs, ogr.wkbUnknown)

    target_layer.StartTransaction()
    input_geom_list = [invalid_bowtie_polygon,
                       invalid_shared_edge_polygon,
                       invalid_dangling_edge_polygon,
                       invalid_open_ring_polygon]
    for geometry in input_geom_list:
        new_feature = ogr.Feature(target_layer.GetLayerDefn())
        new_feature.SetGeometry(geometry)
        target_layer.CreateFeature(new_feature)
    target_layer.CommitTransaction()

    target_layer = None
    target_vector = None

    return len(input_geom_list)
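

# A minimal sketch, assuming shapely is available, of the 0-width buffer
# repair described in the docstring of make_vector_of_invalid_geoms. The
# helper name _repair_with_zero_buffer is hypothetical and is not called
# by any test.
def _repair_with_zero_buffer(wkt_string):
    """Return a valid shapely geometry parsed from a bad WKT, if possible."""
    import shapely.wkt  # local import; the module top only imports shapely.wkb
    geometry = shapely.wkt.loads(wkt_string)  # raises if WKT cannot be read
    # A 0-width buffer rebuilds self-intersections; for example it splits
    # the bowtie polygon above into a valid MultiPolygon.
    return geometry if geometry.is_valid else geometry.buffer(0)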