Fix merge conflict (#1774)

Claire Simpson 2025-05-06 11:49:15 -06:00
commit 4e4bfb03c8
8 changed files with 304 additions and 30 deletions

View File

@@ -76,8 +76,13 @@ Workbench
* Fixed a bug that prevented users from selecting a folder as the location
to extract a datastack archive.
(`#1879 <https://github.com/natcap/invest/issues/1879>`_).
* Metadata is now copied (if it exists) and/or generated for files when creating
a datastack (`#1774 <https://github.com/natcap/invest/issues/1774>`_).
* When a parameter from a previous model run is changed, the model status
indicator (e.g., the "Model Complete" notice) is cleared to help prevent
confusion about which parameters went into the most recent model run
(`#1655 <https://github.com/natcap/invest/issues/1655>`_).
* Metadata is now generated for files when creating a datastack, with any
existing user-added metadata preserved
(`#1774 <https://github.com/natcap/invest/issues/1774>`_).
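A rough sketch of the copy-or-generate behavior described in the #1774 entry
above; the sidecar ".yml" naming and the generate_metadata callable are
illustrative assumptions, not the actual InVEST implementation:

import os
import shutil

def archive_with_metadata(file_path, archive_dir, generate_metadata):
    """Copy a file into a datastack archive along with its metadata.

    generate_metadata stands in for whatever metadata writer the project
    uses; it is assumed to accept a path and write a sidecar document.
    """
    target_path = os.path.join(archive_dir, os.path.basename(file_path))
    shutil.copy(file_path, target_path)
    sidecar_path = file_path + '.yml'  # assumed sidecar naming convention
    if os.path.exists(sidecar_path):
        # Preserve any user-added metadata by copying it alongside the file.
        shutil.copy(sidecar_path, target_path + '.yml')
    else:
        # Otherwise generate fresh metadata for the archived copy.
        generate_metadata(target_path)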
Crop Production
===============
@@ -95,6 +100,12 @@ Seasonal Water Yield
were being incorrectly converted to 0's
(`#1592 <https://github.com/natcap/invest/issues/1592>`_).
Wind Energy
===========
* Fixed a bug where the model would error if no AOI was provided when run from
the workbench or from a datastack file where the value for 'aoi_vector_path'
was an empty string. (`#1900 <https://github.com/natcap/invest/issues/1900>`_)
3.15.0 (2025-04-03)
-------------------

View File

@@ -12,6 +12,10 @@ import numpy
import pandas
import pygeoprocessing
import scipy.spatial
import shapely.errors
import shapely.geometry
import shapely.prepared
import shapely.wkb
import taskgraph
from osgeo import gdal
from osgeo import ogr
@@ -115,7 +119,8 @@ MODEL_SPEC = {
"about": spec_utils.LULC['about'] + " " + gettext(
"All values in this raster must "
"have corresponding entries in the Biophysical Table."),
"projected": True
"projected": True,
"projection_units": u.meter
},
"pools_to_calculate": {
"type": "option_string",
@@ -167,6 +172,8 @@ MODEL_SPEC = {
"Used only for the asymptotic model.")}
},
"geometries": spec_utils.POLYGONS,
"projected": True,
"projection_units": u.meter,
"required": "compute_forest_edge_effects",
"about": gettext(
"Map storing the optimal regression model for each tropical "
@@ -251,6 +258,13 @@ MODEL_SPEC = {
"bands": {1: {
"type": "number", "units": u.metric_ton/u.hectare
}}
},
"regression_model_params_clipped.shp": {
"about": (
"The Global Regression Models shapefile clipped "
"to the study area."),
"geometries": spec_utils.POLYGONS,
"fields": {}
}
}
},
@@ -409,6 +423,9 @@ def execute(args):
f'tropical_forest_edge_carbon_stocks{file_suffix}.tif')
output_file_registry['non_forest_mask'] = os.path.join(
intermediate_dir, f'non_forest_mask{file_suffix}.tif')
output_file_registry['tropical_forest_edge_clipped'] = os.path.join(
intermediate_dir,
f'regression_model_params_clipped{file_suffix}.shp')
# Map non-forest landcover codes to carbon biomasses
LOGGER.info('Calculating direct mapped carbon stocks')
@@ -445,15 +462,26 @@ def execute(args):
output_file_registry['non_forest_mask']],
task_name='map_distance_from_forest_edge')
# Clip global regression model vector to LULC raster bounding box
LOGGER.info('Clipping global forest carbon edge regression models vector')
clip_forest_edge_carbon_vector_task = task_graph.add_task(
func=_clip_global_regression_models_vector,
args=(args['lulc_raster_path'],
args['tropical_forest_edge_carbon_model_vector_path'],
output_file_registry['tropical_forest_edge_clipped']),
target_path_list=[output_file_registry['tropical_forest_edge_clipped']],
task_name='clip_forest_edge_carbon_vector')
# Build spatial index for gridded global model for closest 3 points
LOGGER.info('Building spatial index for forest edge models.')
build_spatial_index_task = task_graph.add_task(
func=_build_spatial_index,
args=(args['lulc_raster_path'], intermediate_dir,
args['tropical_forest_edge_carbon_model_vector_path'],
output_file_registry['tropical_forest_edge_clipped'],
output_file_registry['spatial_index_pickle']),
target_path_list=[output_file_registry['spatial_index_pickle']],
task_name='build_spatial_index')
task_name='build_spatial_index',
dependent_task_list=[clip_forest_edge_carbon_vector_task])
# calculate the carbon edge effect on forests
LOGGER.info('Calculating forest edge carbon')
@@ -721,7 +749,8 @@ def _map_distance_from_tropical_forest_edge(
edge_distance_raster = gdal.OpenEx(edge_distance_path, gdal.GA_Update)
edge_distance_band = edge_distance_raster.GetRasterBand(1)
for offset_dict in pygeoprocessing.iterblocks((base_lulc_raster_path, 1), offset_only=True):
for offset_dict in pygeoprocessing.iterblocks((base_lulc_raster_path, 1),
offset_only=True):
# where LULC has nodata, overwrite edge distance with nodata value
lulc_block = lulc_band.ReadAsArray(**offset_dict)
distance_block = edge_distance_band.ReadAsArray(**offset_dict)
@@ -733,6 +762,93 @@ def _map_distance_from_tropical_forest_edge(
yoff=offset_dict['yoff'])
def _clip_global_regression_models_vector(
lulc_raster_path, source_vector_path, target_vector_path):
"""Clip the global carbon edge model shapefile
Clip the shapefile containing the global carbon edge model parameters
to the bounding box of the LULC raster (representing the target study area)
plus a buffer to account for the kd-tree lookup DISTANCE_UPPER_BOUND
Args:
lulc_raster_path (string): path to a raster that is used to define the
bounding box to use for clipping.
source_vector_path (string): a path to an OGR shapefile to be clipped.
target_vector_path (string): a path to an OGR shapefile to store the
clipped vector.
Returns:
None
"""
raster_info = pygeoprocessing.get_raster_info(lulc_raster_path)
vector_info = pygeoprocessing.get_vector_info(source_vector_path)
bbox_minx, bbox_miny, bbox_maxx, bbox_maxy = raster_info['bounding_box']
buffered_bb = [
bbox_minx - DISTANCE_UPPER_BOUND,
bbox_miny - DISTANCE_UPPER_BOUND,
bbox_maxx + DISTANCE_UPPER_BOUND,
bbox_maxy + DISTANCE_UPPER_BOUND,
]
# Reproject the LULC bounding box to the vector's projection for clipping
mask_bb = pygeoprocessing.transform_bounding_box(buffered_bb,
raster_info['projection_wkt'], vector_info['projection_wkt'])
shapely_mask = shapely.prepared.prep(shapely.geometry.box(*mask_bb))
base_vector = gdal.OpenEx(source_vector_path, gdal.OF_VECTOR)
base_layer = base_vector.GetLayer()
base_layer_defn = base_layer.GetLayerDefn()
base_geom_type = base_layer.GetGeomType()
target_driver = gdal.GetDriverByName('ESRI Shapefile')
target_vector = target_driver.Create(
target_vector_path, 0, 0, 0, gdal.GDT_Unknown)
target_layer = target_vector.CreateLayer(
base_layer_defn.GetName(), base_layer.GetSpatialRef(), base_geom_type)
target_layer.CreateFields(base_layer.schema)
target_layer.StartTransaction()
invalid_feature_count = 0
for feature in base_layer:
invalid = False
geometry = feature.GetGeometryRef()
try:
shapely_geom = shapely.wkb.loads(bytes(geometry.ExportToWkb()))
# Catch invalid geometries that cannot be loaded by Shapely;
# e.g. polygons with too few points for their type
except shapely.errors.ShapelyError:
invalid = True
else:
if shapely_geom.is_valid:
# Check for intersection rather than use gdal.Layer.Clip()
# to preserve the shape of the polygons (we use the centroid
# when constructing the kd-tree)
if shapely_mask.intersects(shapely_geom):
new_feature = ogr.Feature(target_layer.GetLayerDefn())
new_feature.SetGeometry(ogr.CreateGeometryFromWkb(
shapely_geom.wkb))
for field_name, field_value in feature.items().items():
new_feature.SetField(field_name, field_value)
target_layer.CreateFeature(new_feature)
else:
invalid = True
finally:
if invalid:
invalid_feature_count += 1
LOGGER.warning(
f"The geometry at feature {feature.GetFID()} is invalid "
"and will be skipped.")
target_layer.CommitTransaction()
if invalid_feature_count:
LOGGER.warning(
f"{invalid_feature_count} features in {source_vector_path} "
"were found to be invalid during clipping and were skipped.")
def _build_spatial_index(
base_raster_path, local_model_dir,
tropical_forest_edge_carbon_model_vector_path,
@@ -770,10 +886,9 @@ def _build_spatial_index(
lulc_projection_wkt = pygeoprocessing.get_raster_info(
base_raster_path)['projection_wkt']
with utils._set_gdal_configuration('OGR_ENABLE_PARTIAL_REPROJECTION', 'TRUE'):
pygeoprocessing.reproject_vector(
tropical_forest_edge_carbon_model_vector_path, lulc_projection_wkt,
carbon_model_reproject_path)
pygeoprocessing.reproject_vector(
tropical_forest_edge_carbon_model_vector_path, lulc_projection_wkt,
carbon_model_reproject_path)
model_vector = gdal.OpenEx(carbon_model_reproject_path)
model_layer = model_vector.GetLayer()
@@ -785,17 +900,14 @@
# put all the polygons in the kd_tree because it's fast and simple
for poly_feature in model_layer:
poly_geom = poly_feature.GetGeometryRef()
if poly_geom.IsValid():
poly_centroid = poly_geom.Centroid()
# put in row/col order since rasters are row/col indexed
kd_points.append([poly_centroid.GetY(), poly_centroid.GetX()])
poly_centroid = poly_geom.Centroid()
# put in row/col order since rasters are row/col indexed
kd_points.append([poly_centroid.GetY(), poly_centroid.GetX()])
theta_model_parameters.append([
poly_feature.GetField(feature_id) for feature_id in
['theta1', 'theta2', 'theta3']])
method_model_parameter.append(poly_feature.GetField('method'))
else:
LOGGER.warning(f'skipping invalid geometry {poly_geom}')
theta_model_parameters.append([
poly_feature.GetField(feature_id) for feature_id in
['theta1', 'theta2', 'theta3']])
method_model_parameter.append(poly_feature.GetField('method'))
method_model_parameter = numpy.array(
method_model_parameter, dtype=numpy.int32)
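The clipping buffer above has to be at least as large as the kd-tree search
radius (DISTANCE_UPPER_BOUND), otherwise model points just outside the study
area would be dropped before the nearest-neighbor lookup. A minimal sketch of
that lookup pattern with scipy's cKDTree; the coordinates and the bound value
below are made up for illustration:

import numpy
import scipy.spatial

# Hypothetical (y, x) centroids of regression-model polygons, matching the
# row/col ordering used when the spatial index pickle is built.
kd_points = numpy.array([
    [4923269.0, 461262.0],
    [4923000.0, 461500.0],
    [4920000.0, 465000.0],
])
kd_tree = scipy.spatial.cKDTree(kd_points)

# Query the 3 closest model points for one pixel center, ignoring anything
# farther away than the search radius.
DISTANCE_UPPER_BOUND = 500.0
distances, indices = kd_tree.query(
    [4923100.0, 461300.0], k=3, distance_upper_bound=DISTANCE_UPPER_BOUND)

# Neighbors beyond the bound come back with an infinite distance and an index
# equal to len(kd_points), so they can be masked out before weighting.
valid = numpy.isfinite(distances)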

View File

@@ -757,7 +757,7 @@ def execute(args):
target_path_list=[wind_data_pickle_path],
task_name='compute_density_harvested_fields')
if 'aoi_vector_path' in args:
if 'aoi_vector_path' in args and args['aoi_vector_path'] != '':
LOGGER.info('AOI Provided')
aoi_vector_path = args['aoi_vector_path']
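For context on the updated condition: a datastack file can round-trip an
optional input as an empty string rather than omitting the key, so a plain
membership test is not enough. A small sketch with made-up args dictionaries:

# Two ways an optional AOI can be "missing".
args_without_key = {'workspace_dir': 'out'}
args_with_empty_string = {'workspace_dir': 'out', 'aoi_vector_path': ''}

for args in (args_without_key, args_with_empty_string):
    # Mirrors the fixed check: the AOI counts as provided only when the key
    # exists and holds a non-empty path.
    aoi_provided = 'aoi_vector_path' in args and args['aoi_vector_path'] != ''
    print(aoi_provided)  # False in both cases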

View File

@@ -1,17 +1,18 @@
"""Module for Regression Testing the InVEST Forest Carbon Edge model."""
import unittest
import tempfile
import shutil
import os
import pickle
import shutil
import tempfile
import unittest
import numpy
import pandas
from osgeo import gdal, osr, ogr
import pygeoprocessing
import shapely
from osgeo import gdal
from osgeo import ogr
from osgeo import osr
from shapely.geometry import Polygon
import pygeoprocessing
import pickle
import pygeoprocessing
gdal.UseExceptions()
REGRESSION_DATA = os.path.join(
@@ -530,6 +531,146 @@ class ForestCarbonEdgeTests(unittest.TestCase):
self.assertAlmostEqual(actual_output[3, 5], 274.47815)
self.assertAlmostEqual(actual_output[0, 1], -1)
def test_invalid_geometries(self):
"""Forest Carbon: Check handling of invalid vector geometries."""
from natcap.invest import forest_carbon_edge_effect
srs = osr.SpatialReference()
srs.ImportFromEPSG(26910)
projection_wkt = srs.ExportToWkt()
lulc_matrix = numpy.ones((5,5), dtype=numpy.int8)
lulc_raster_path = os.path.join(self.workspace_dir, 'lulc.tif')
pygeoprocessing.numpy_array_to_raster(
lulc_matrix, 255, (1, -1), (461262, 4923269), projection_wkt,
lulc_raster_path)
fields_polys = {'geom_id': ogr.OFTInteger}
attrs_polys = [
{'geom_id': 1},
{'geom_id': 2},
{'geom_id': 3}
]
# valid polygon
valid_poly = Polygon([
(461265, 4923269), (461267, 4923269), (461267, 4923267),
(461265, 4923267), (461265, 4923269)])
self.assertTrue(shapely.is_valid(valid_poly))
# invalid bowtie polygon
invalid_bowtie_poly = Polygon([
(461262, 4923267), (461264, 4923267), (461262, 4923265),
(461264, 4923265), (461262, 4923267)])
self.assertFalse(shapely.is_valid(invalid_bowtie_poly))
# invalid bowtie polygon with vertex in the middle
invalid_alt_bowtie_poly = Polygon([
(461262, 4923267), (461264, 4923267), (461263, 4923266),
(461264, 4923265), (461262, 4923265), (461263, 4923266),
(461262, 4923267)])
self.assertFalse(shapely.is_valid(invalid_alt_bowtie_poly))
test_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg')
pygeoprocessing.shapely_geometry_to_vector(
[valid_poly, invalid_bowtie_poly,
invalid_alt_bowtie_poly],
test_vector_path, projection_wkt, 'GPKG',
fields=fields_polys,
attribute_list=attrs_polys)
test_vector = gdal.OpenEx(
test_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
test_layer = test_vector.GetLayer()
self.assertEqual(test_layer.GetFeatureCount(), 3)
test_layer = None
test_vector = None
target_vector_path = os.path.join(
self.workspace_dir, 'checked_geometries.gpkg')
forest_carbon_edge_effect._clip_global_regression_models_vector(
lulc_raster_path, test_vector_path, target_vector_path)
target_vector = gdal.OpenEx(target_vector_path, gdal.OF_VECTOR)
target_layer = target_vector.GetLayer()
self.assertEqual(
target_layer.GetFeatureCount(), 1)
# valid_polygon geom_id == 1
self.assertEqual(target_layer.GetFeature(0).GetField('geom_id'), 1)
target_layer = None
target_vector = None
def test_vector_clipping_buffer(self):
"""Forest Carbon: Check DISTANCE_UPPER_BOUND buffer."""
from natcap.invest import forest_carbon_edge_effect
srs = osr.SpatialReference()
srs.ImportFromEPSG(26910)
projection_wkt = srs.ExportToWkt()
lulc_matrix = numpy.ones((5,5), dtype=numpy.int8)
lulc_raster_path = os.path.join(self.workspace_dir, 'lulc.tif')
pygeoprocessing.numpy_array_to_raster(
lulc_matrix, 255, (1000, -1000), (461261, 4923270), projection_wkt,
lulc_raster_path)
# get LULC bounding box; will be used to construct polygons
raster_info = pygeoprocessing.get_raster_info(lulc_raster_path)
bbox_minx, bbox_miny, bbox_maxx, bbox_maxy = raster_info['bounding_box']
fields_polys = {'geom_id': ogr.OFTInteger}
attrs_polys = [
{'geom_id': 1},
{'geom_id': 2},
{'geom_id': 3},
{'geom_id': 4}
]
# polygon that intersects with the lulc
in_bbox_poly = shapely.geometry.box(
bbox_maxx - 150, bbox_maxy - 150,
bbox_maxx - 50, bbox_maxy - 50)
self.assertTrue(shapely.is_valid(in_bbox_poly))
# polygon outside of lulc bb, but within buffer distance
in_buffer_poly = shapely.geometry.box(
bbox_maxx + 50, bbox_maxy + 50,
bbox_maxx + 150, bbox_maxy + 150)
self.assertTrue(shapely.is_valid(in_buffer_poly))
buffer = forest_carbon_edge_effect.DISTANCE_UPPER_BOUND
# polygon on the edge of the buffer
buffer_edge_poly = shapely.geometry.box(
bbox_maxx + buffer + 50, bbox_maxy + buffer + 50,
bbox_maxx + buffer - 50, bbox_maxy + buffer - 50)
self.assertTrue(shapely.is_valid(buffer_edge_poly))
# polygon outside of buffer distance
out_of_buffer_poly = shapely.geometry.box(
bbox_maxx + buffer + 50, bbox_maxy + buffer + 50,
bbox_maxx + buffer + 150, bbox_maxy + buffer + 150)
self.assertTrue(shapely.is_valid(out_of_buffer_poly))
test_vector_path = os.path.join(self.workspace_dir, 'vector.gpkg')
pygeoprocessing.shapely_geometry_to_vector(
[in_bbox_poly, in_buffer_poly,
buffer_edge_poly, out_of_buffer_poly],
test_vector_path, projection_wkt, 'GPKG',
fields=fields_polys,
attribute_list=attrs_polys)
target_vector_path = os.path.join(
self.workspace_dir, 'checked_geometries.gpkg')
forest_carbon_edge_effect._clip_global_regression_models_vector(
lulc_raster_path, test_vector_path, target_vector_path)
vector = gdal.OpenEx(
target_vector_path, gdal.OF_VECTOR | gdal.GA_ReadOnly)
layer = vector.GetLayer()
self.assertEqual(layer.GetFeatureCount(), 3)
feat_ids = [feat.GetField('geom_id') for feat in layer]
self.assertEqual(feat_ids, [1, 2, 3])
@staticmethod
def _test_same_files(base_list_path, directory_path):
"""Assert files in `base_list_path` are in `directory_path`.

View File

@@ -299,6 +299,8 @@ class InvestTab extends React.Component {
sidebarFooterElementId={sidebarFooterElementId}
executeClicked={executeClicked}
switchTabs={this.switchTabs}
tabID={tabID}
updateJobProperties={this.props.updateJobProperties}
/>
</TabPane>
<TabPane

View File

@@ -261,7 +261,7 @@ export default function ArgInput(props) {
name={argkey}
value={value}
onChange={handleChange}
onFocus={handleChange}
onFocus={handleFocus}
disabled={!enabled}
isValid={enabled && isValid}
custom

View File

@@ -383,6 +383,7 @@ class SetupTab extends React.Component {
* Updating means:
* 1) setting the value
* 2) toggling the enabled/disabled/hidden state of any dependent args
* 3) clearing job status in case args are being updated after the model has been run.
*
* @param {string} key - the invest argument key
* @param {string} value - the invest argument value
@@ -394,6 +395,9 @@ class SetupTab extends React.Component {
this.setState({
argsValues: argsValues,
}, () => {
this.props.updateJobProperties(this.props.tabID, {
status: undefined, // Clear job status to hide model status indicator.
});
this.debouncedValidate();
this.callUISpecFunctions();
});
@@ -646,4 +650,6 @@ SetupTab.propTypes = {
sidebarFooterElementId: PropTypes.string.isRequired,
executeClicked: PropTypes.bool.isRequired,
switchTabs: PropTypes.func.isRequired,
tabID: PropTypes.string.isRequired,
updateJobProperties: PropTypes.func.isRequired,
};

View File

@@ -75,6 +75,8 @@ function renderSetupFromSpec(baseSpec, uiSpec, initValues = undefined) {
sidebarFooterElementId="foo"
executeClicked={false}
switchTabs={() => {}}
tabID={'999'}
updateJobProperties={() => {}}
/>
);
return utils;