remove utils.array_equals_nodata and replace with pygeoprocessing.array_equals_nodata

Emily Soth 2023-10-30 15:55:33 -07:00
parent 357c310ed1
commit d0b880e564
23 changed files with 109 additions and 174 deletions
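The change is a one-for-one rename at every call site: the module-local helper natcap.invest.utils.array_equals_nodata is dropped in favor of the same-named function exported by pygeoprocessing. A minimal sketch of the call pattern this commit standardizes on (assuming a pygeoprocessing release that exports array_equals_nodata with the same nan- and None-aware semantics as the removed helper):

import numpy
import pygeoprocessing

nodata = -1.0
block = numpy.array([[3.0, -1.0], [5.0, 7.0]], dtype=numpy.float32)

# Before this commit: valid_mask = ~utils.array_equals_nodata(block, nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(block, nodata)
block_sum = numpy.sum(block[valid_mask], dtype=numpy.float64)  # 15.0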

View File

@@ -1026,19 +1026,19 @@ def fractp_op(
# and retain their original nodata values.
# out_nodata is defined above and should never be None.
valid_mask = (
~utils.array_equals_nodata(Kc, nodata_dict['out_nodata']) &
~utils.array_equals_nodata(root, nodata_dict['out_nodata']) &
~utils.array_equals_nodata(veg, nodata_dict['out_nodata']) &
~utils.array_equals_nodata(precip, 0))
~pygeoprocessing.array_equals_nodata(Kc, nodata_dict['out_nodata']) &
~pygeoprocessing.array_equals_nodata(root, nodata_dict['out_nodata']) &
~pygeoprocessing.array_equals_nodata(veg, nodata_dict['out_nodata']) &
~pygeoprocessing.array_equals_nodata(precip, 0))
if nodata_dict['eto'] is not None:
valid_mask &= ~utils.array_equals_nodata(eto, nodata_dict['eto'])
valid_mask &= ~pygeoprocessing.array_equals_nodata(eto, nodata_dict['eto'])
if nodata_dict['precip'] is not None:
valid_mask &= ~utils.array_equals_nodata(precip, nodata_dict['precip'])
valid_mask &= ~pygeoprocessing.array_equals_nodata(precip, nodata_dict['precip'])
if nodata_dict['depth_root'] is not None:
valid_mask &= ~utils.array_equals_nodata(
valid_mask &= ~pygeoprocessing.array_equals_nodata(
soil, nodata_dict['depth_root'])
if nodata_dict['pawc'] is not None:
valid_mask &= ~utils.array_equals_nodata(pawc, nodata_dict['pawc'])
valid_mask &= ~pygeoprocessing.array_equals_nodata(pawc, nodata_dict['pawc'])
# Compute Budyko Dryness index
# Use the original AET equation if the land cover type is vegetation

View File

@@ -531,7 +531,7 @@ def _accumulate_totals(raster_path):
# the sum. Users calculated the sum with ArcGIS zonal statistics,
# noticed a difference and wrote to us about it on the forum.
raster_sum += numpy.sum(
block[~utils.array_equals_nodata(
block[~pygeoprocessing.array_equals_nodata(
block, nodata)], dtype=numpy.float64)
return raster_sum

View File

@@ -1587,9 +1587,9 @@ def _calculate_accumulation_over_time(
target_matrix[:] = NODATA_FLOAT32_MIN
valid_pixels = (
~utils.array_equals_nodata(annual_biomass_matrix, NODATA_FLOAT32_MIN) &
~utils.array_equals_nodata(annual_soil_matrix, NODATA_FLOAT32_MIN) &
~utils.array_equals_nodata(annual_litter_matrix, NODATA_FLOAT32_MIN))
~pygeoprocessing.array_equals_nodata(annual_biomass_matrix, NODATA_FLOAT32_MIN) &
~pygeoprocessing.array_equals_nodata(annual_soil_matrix, NODATA_FLOAT32_MIN) &
~pygeoprocessing.array_equals_nodata(annual_litter_matrix, NODATA_FLOAT32_MIN))
target_matrix[valid_pixels] = (
(annual_biomass_matrix[valid_pixels] +
@@ -1687,14 +1687,14 @@ def _track_disturbance(
disturbance_magnitude_matrix.shape, dtype=numpy.float32)
disturbed_carbon_volume[:] = NODATA_FLOAT32_MIN
disturbed_carbon_volume[
~utils.array_equals_nodata(disturbance_magnitude_matrix,
~pygeoprocessing.array_equals_nodata(disturbance_magnitude_matrix,
NODATA_FLOAT32_MIN)] = 0.0
if year_of_disturbance_band:
known_transition_years_matrix = (
year_of_disturbance_band.ReadAsArray(**block_info))
pixels_previously_disturbed = (
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
known_transition_years_matrix, NODATA_UINT16_MAX))
year_last_disturbed[pixels_previously_disturbed] = (
known_transition_years_matrix[pixels_previously_disturbed])
@@ -1706,9 +1706,9 @@ def _track_disturbance(
stock_matrix = stock_band.ReadAsArray(**block_info)
pixels_changed_this_year = (
~utils.array_equals_nodata(disturbance_magnitude_matrix, NODATA_FLOAT32_MIN) &
~utils.array_equals_nodata(disturbance_magnitude_matrix, 0.0) &
~utils.array_equals_nodata(stock_matrix, NODATA_FLOAT32_MIN)
~pygeoprocessing.array_equals_nodata(disturbance_magnitude_matrix, NODATA_FLOAT32_MIN) &
~pygeoprocessing.array_equals_nodata(disturbance_magnitude_matrix, 0.0) &
~pygeoprocessing.array_equals_nodata(stock_matrix, NODATA_FLOAT32_MIN)
)
disturbed_carbon_volume[pixels_changed_this_year] = (
@@ -1773,7 +1773,7 @@ def _calculate_net_sequestration(
dtype=bool)
if accumulation_nodata is not None:
valid_accumulation_pixels &= (
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
accumulation_matrix, accumulation_nodata))
target_matrix[valid_accumulation_pixels] += (
accumulation_matrix[valid_accumulation_pixels])
@@ -1781,7 +1781,7 @@ def _calculate_net_sequestration(
valid_emissions_pixels = ~numpy.isclose(emissions_matrix, 0.0)
if emissions_nodata is not None:
valid_emissions_pixels &= (
~utils.array_equals_nodata(emissions_matrix, emissions_nodata))
~pygeoprocessing.array_equals_nodata(emissions_matrix, emissions_nodata))
target_matrix[valid_emissions_pixels] = emissions_matrix[
valid_emissions_pixels] * -1
@@ -1822,9 +1822,9 @@ def _calculate_emissions(
zero_half_life = numpy.isclose(carbon_half_life_matrix, 0.0)
valid_pixels = (
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
carbon_disturbed_matrix, NODATA_FLOAT32_MIN) &
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
year_of_last_disturbance_matrix, NODATA_UINT16_MAX) &
~zero_half_life)
@@ -1909,7 +1909,7 @@ def _sum_n_rasters(
array = band.ReadAsArray(**block_info)
valid_pixels = slice(None)
if nodata is not None:
valid_pixels = ~utils.array_equals_nodata(array, nodata)
valid_pixels = ~pygeoprocessing.array_equals_nodata(array, nodata)
sum_array[valid_pixels] += array[valid_pixels]
pixels_touched[valid_pixels] = 1
@@ -2102,11 +2102,11 @@ def _reclassify_accumulation_transition(
valid_pixels = numpy.ones(landuse_transition_from_matrix.shape,
dtype=bool)
if from_nodata is not None:
valid_pixels &= ~utils.array_equals_nodata(
valid_pixels &= ~pygeoprocessing.array_equals_nodata(
landuse_transition_from_matrix, from_nodata)
if to_nodata is not None:
valid_pixels &= ~utils.array_equals_nodata(
valid_pixels &= ~pygeoprocessing.array_equals_nodata(
landuse_transition_to_matrix, to_nodata)
output_matrix[valid_pixels] = accumulation_rate_matrix[

View File

@@ -293,8 +293,8 @@ def _create_transition_table(landcover_df, lulc_snapshot_list,
# integer type. When int matrices, we can compare directly to
# None.
valid_pixels = (
~utils.array_equals_nodata(from_array, from_nodata) &
~utils.array_equals_nodata(to_array, to_nodata))
~pygeoprocessing.array_equals_nodata(from_array, from_nodata) &
~pygeoprocessing.array_equals_nodata(to_array, to_nodata))
transition_pairs = transition_pairs.union(
set(zip(from_array[valid_pixels].flatten(),
to_array[valid_pixels].flatten())))

View File

@@ -1786,7 +1786,7 @@ def extract_bathymetry_along_ray(
# if nodata value is undefined, all pixels are valid
valid_mask = slice(None)
if bathy_nodata is not None:
valid_mask = ~utils.array_equals_nodata(values, bathy_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(values, bathy_nodata)
if numpy.any(valid_mask):
# take mean of valids and move on
value = numpy.mean(values[valid_mask])
@@ -2145,7 +2145,7 @@ def zero_negative_values(depth_array, nodata):
"""Convert negative values to zero for relief."""
result_array = numpy.empty_like(depth_array)
if nodata is not None:
valid_mask = ~utils.array_equals_nodata(depth_array, nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(depth_array, nodata)
result_array[:] = nodata
result_array[valid_mask] = 0
else:
@@ -3233,7 +3233,7 @@ def _aggregate_raster_values_in_radius(
xoff=pixel_x, yoff=pixel_y, win_xsize=win_xsize,
win_ysize=win_ysize)
if nodata is not None:
kernel_mask &= ~utils.array_equals_nodata(array, nodata)
kernel_mask &= ~pygeoprocessing.array_equals_nodata(array, nodata)
result[shore_id] = aggregation_op(array, kernel_mask)
with open(target_pickle_path, 'wb') as pickle_file:

View File

@@ -795,7 +795,7 @@ def _zero_observed_yield_op(observed_yield_array, observed_yield_nodata):
result[:] = 0
valid_mask = slice(None)
if observed_yield_nodata is not None:
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
observed_yield_array, observed_yield_nodata)
result[valid_mask] = observed_yield_array[valid_mask]
return result
@@ -821,7 +821,7 @@ def _mask_observed_yield_op(
result = numpy.empty(lulc_array.shape, dtype=numpy.float32)
if landcover_nodata is not None:
result[:] = observed_yield_nodata
valid_mask = ~utils.array_equals_nodata(lulc_array, landcover_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(lulc_array, landcover_nodata)
result[valid_mask] = 0
else:
result[:] = 0
@@ -895,7 +895,7 @@ def tabulate_results(
# if nodata value undefined, assume all pixels are valid
valid_mask = numpy.full(yield_block.shape, True)
if observed_yield_nodata is not None:
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
yield_block, observed_yield_nodata)
production_pixel_count += numpy.count_nonzero(
valid_mask & (yield_block > 0))
@@ -915,7 +915,7 @@ def tabulate_results(
(yield_percentile_raster_path, 1)):
# _NODATA_YIELD will always have a value (defined above)
yield_sum += numpy.sum(
yield_block[~utils.array_equals_nodata(
yield_block[~pygeoprocessing.array_equals_nodata(
yield_block, _NODATA_YIELD)])
production_lookup[yield_percentile_id] = yield_sum
result_table.write(",%f" % yield_sum)
@@ -942,7 +942,7 @@ def tabulate_results(
(landcover_raster_path, 1)):
if landcover_nodata is not None:
total_area += numpy.count_nonzero(
~utils.array_equals_nodata(band_values, landcover_nodata))
~pygeoprocessing.array_equals_nodata(band_values, landcover_nodata))
else:
total_area += band_values.size
result_table.write(

View File

@@ -858,9 +858,9 @@ def _x_yield_op(
result = numpy.empty(b_x.shape, dtype=numpy.float32)
result[:] = _NODATA_YIELD
valid_mask = (
~utils.array_equals_nodata(y_max, _NODATA_YIELD) &
~utils.array_equals_nodata(b_x, _NODATA_YIELD) &
~utils.array_equals_nodata(c_x, _NODATA_YIELD) &
~pygeoprocessing.array_equals_nodata(y_max, _NODATA_YIELD) &
~pygeoprocessing.array_equals_nodata(b_x, _NODATA_YIELD) &
~pygeoprocessing.array_equals_nodata(c_x, _NODATA_YIELD) &
(lulc_array == crop_lucode))
result[valid_mask] = pixel_area_ha * y_max[valid_mask] * (
1 - b_x[valid_mask] * numpy.exp(
@@ -890,7 +890,7 @@ def _zero_observed_yield_op(observed_yield_array, observed_yield_nodata):
result[:] = 0
valid_mask = slice(None)
if observed_yield_nodata is not None:
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
observed_yield_array, observed_yield_nodata)
result[valid_mask] = observed_yield_array[valid_mask]
return result
@@ -916,7 +916,7 @@ def _mask_observed_yield_op(
result = numpy.empty(lulc_array.shape, dtype=numpy.float32)
if landcover_nodata is not None:
result[:] = observed_yield_nodata
valid_mask = ~utils.array_equals_nodata(lulc_array, landcover_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(lulc_array, landcover_nodata)
result[valid_mask] = 0
else:
result[:] = 0
@@ -978,7 +978,7 @@ def tabulate_regression_results(
# if nodata value undefined, assume all pixels are valid
valid_mask = numpy.full(yield_block.shape, True)
if observed_yield_nodata is not None:
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
yield_block, observed_yield_nodata)
production_pixel_count += numpy.count_nonzero(
valid_mask & (yield_block > 0.0))
@@ -996,7 +996,7 @@ def tabulate_regression_results(
(crop_production_raster_path, 1)):
yield_sum += numpy.sum(
# _NODATA_YIELD will always have a value (defined above)
yield_block[~utils.array_equals_nodata(
yield_block[~pygeoprocessing.array_equals_nodata(
yield_block, _NODATA_YIELD)])
production_lookup['modeled'] = yield_sum
result_table.write(",%f" % yield_sum)
@@ -1022,7 +1022,7 @@ def tabulate_regression_results(
(landcover_raster_path, 1)):
if landcover_nodata is not None:
total_area += numpy.count_nonzero(
~utils.array_equals_nodata(band_values, landcover_nodata))
~pygeoprocessing.array_equals_nodata(band_values, landcover_nodata))
else:
total_area += band_values.size
result_table.write(

View File

@@ -357,7 +357,7 @@ def _threshold_streams(flow_accum, src_nodata, out_nodata, threshold):
valid_pixels = slice(None)
if src_nodata is not None:
valid_pixels = ~utils.array_equals_nodata(flow_accum, src_nodata)
valid_pixels = ~pygeoprocessing.array_equals_nodata(flow_accum, src_nodata)
over_threshold = flow_accum > threshold
out_matrix[valid_pixels & over_threshold] = 1

View File

@@ -523,7 +523,7 @@ def combine_carbon_maps(*carbon_maps):
nodata_mask = numpy.empty(carbon_maps[0].shape, dtype=bool)
nodata_mask[:] = True
for carbon_map in carbon_maps:
valid_mask = ~utils.array_equals_nodata(carbon_map, NODATA_VALUE)
valid_mask = ~pygeoprocessing.array_equals_nodata(carbon_map, NODATA_VALUE)
nodata_mask &= ~valid_mask
result[valid_mask] += carbon_map[valid_mask]
result[nodata_mask] = NODATA_VALUE
@@ -721,7 +721,7 @@ def _map_distance_from_tropical_forest_edge(
# where LULC has nodata, overwrite edge distance with nodata value
lulc_block = lulc_band.ReadAsArray(**offset_dict)
distance_block = edge_distance_band.ReadAsArray(**offset_dict)
nodata_mask = utils.array_equals_nodata(lulc_block, lulc_nodata)
nodata_mask = pygeoprocessing.array_equals_nodata(lulc_block, lulc_nodata)
distance_block[nodata_mask] = lulc_nodata
edge_distance_band.WriteArray(
distance_block,

View File

@@ -950,7 +950,7 @@ def _raster_values_in_bounds(raster_path_band, lower_bound, upper_bound):
values_valid = True
for _, raster_block in pygeoprocessing.iterblocks(raster_path_band):
nodata_mask = ~utils.array_equals_nodata(raster_block, raster_nodata)
nodata_mask = ~pygeoprocessing.array_equals_nodata(raster_block, raster_nodata)
if ((raster_block[nodata_mask] < lower_bound) |
(raster_block[nodata_mask] > upper_bound)).any():
values_valid = False

View File

@@ -1720,7 +1720,7 @@ def _prep_input_criterion_raster(
source_rating_array.shape, _TARGET_NODATA_FLOAT32,
dtype=numpy.float32)
valid_mask = (
(~utils.array_equals_nodata(
(~pygeoprocessing.array_equals_nodata(
source_rating_array, source_nodata)) &
(source_rating_array >= 0.0))
target_rating_array[valid_mask] = source_rating_array[valid_mask]
@@ -2047,7 +2047,7 @@ def _calculate_decayed_distance(stressor_raster_path, decay_type,
# distance and also valid in the source edt.
pixels_within_buffer = (
source_edt_block < buffer_distance_in_pixels)
nodata_pixels = utils.array_equals_nodata(
nodata_pixels = pygeoprocessing.array_equals_nodata(
source_edt_block, edt_nodata)
valid_pixels = ((~nodata_pixels) & pixels_within_buffer)
@@ -2154,7 +2154,7 @@ def _calc_criteria(attributes_list, habitat_mask_raster_path,
# Any habitat pixels with a nodata rating (no rating
# specified by the user) should be
# interpreted as having a rating of 0.
rating[utils.array_equals_nodata(
rating[pygeoprocessing.array_equals_nodata(
rating, rating_nodata)] = 0
finally:
rating_band = None
@@ -2339,7 +2339,7 @@ def _sum_rasters(raster_path_list, target_nodata, target_datatype,
pixels_have_valid_values = numpy.zeros(result.shape, dtype=bool)
valid_pixel_count = numpy.zeros(result.shape, dtype=numpy.uint16)
for array, nodata in zip(array_list, nodata_list):
non_nodata_pixels = ~utils.array_equals_nodata(array, nodata)
non_nodata_pixels = ~pygeoprocessing.array_equals_nodata(array, nodata)
pixels_have_valid_values |= non_nodata_pixels
valid_pixel_count += non_nodata_pixels

View File

@@ -1085,7 +1085,7 @@ def _mask_raster(source_raster_path, mask_raster_path,
result = numpy.full(mask.shape, nodata,
dtype=source_raster_info['numpy_type'])
valid_pixels = (
~utils.array_equals_nodata(raster, nodata) &
~pygeoprocessing.array_equals_nodata(raster, nodata) &
(mask == 1))
result[valid_pixels] = raster[valid_pixels]
return result
@@ -1416,8 +1416,8 @@ def calculate_ic(d_up_path, d_dn_path, target_ic_path):
def _ic_op(d_up, d_dn):
"""Calculate IC0."""
valid_mask = (
~utils.array_equals_nodata(d_up, d_up_nodata) &
~utils.array_equals_nodata(d_dn, d_dn_nodata) & (d_up != 0) &
~pygeoprocessing.array_equals_nodata(d_up, d_up_nodata) &
~pygeoprocessing.array_equals_nodata(d_dn, d_dn_nodata) & (d_up != 0) &
(d_dn != 0))
result = numpy.empty(valid_mask.shape, dtype=numpy.float32)
result[:] = ic_nodata

View File

@@ -617,7 +617,7 @@ def _determine_valid_viewpoints(dem_path, structures_path):
(viewpoint[0] - dem_gt[0]) // dem_gt[1]) - block_data['xoff']
iy_viewpoint = int(
(viewpoint[1] - dem_gt[3]) // dem_gt[5]) - block_data['yoff']
if utils.array_equals_nodata(
if pygeoprocessing.array_equals_nodata(
numpy.array(dem_block[iy_viewpoint][ix_viewpoint]),
dem_nodata).any():
LOGGER.info(
@@ -712,14 +712,14 @@ def _sum_valuation_rasters(dem_path, valuation_filepaths, target_path):
dem_nodata = pygeoprocessing.get_raster_info(dem_path)['nodata'][0]
def _sum_rasters(dem, *valuation_rasters):
valid_dem_pixels = ~utils.array_equals_nodata(dem, dem_nodata)
valid_dem_pixels = ~pygeoprocessing.array_equals_nodata(dem, dem_nodata)
raster_sum = numpy.empty(dem.shape, dtype=numpy.float64)
raster_sum[:] = _VALUATION_NODATA
raster_sum[valid_dem_pixels] = 0
for valuation_matrix in valuation_rasters:
valid_pixels = (
~utils.array_equals_nodata(valuation_matrix, _VALUATION_NODATA)
~pygeoprocessing.array_equals_nodata(valuation_matrix, _VALUATION_NODATA)
& valid_dem_pixels)
raster_sum[valid_pixels] += valuation_matrix[valid_pixels]
return raster_sum
@@ -848,7 +848,7 @@ def _calculate_valuation(visibility_path, viewpoint, weight,
dtype=numpy.float64) * pixel_size_in_m
valid_distances = (dist_in_m <= max_valuation_radius)
nodata = utils.array_equals_nodata(vis_block, vis_nodata)
nodata = pygeoprocessing.array_equals_nodata(vis_block, vis_nodata)
valid_indexes = (valid_distances & (~nodata))
visibility_value[valid_indexes] = _valuation(dist_in_m[valid_indexes],
@@ -920,7 +920,7 @@ def _clip_and_mask_dem(dem_path, aoi_path, target_path, working_dir):
dem_nodata = dem_raster_info['nodata'][0]
def _mask_op(dem, aoi_mask):
valid_pixels = (~utils.array_equals_nodata(dem, dem_nodata) &
valid_pixels = (~pygeoprocessing.array_equals_nodata(dem, dem_nodata) &
(aoi_mask == 1))
masked_dem = numpy.empty(dem.shape)
masked_dem[:] = dem_nodata
@@ -977,7 +977,7 @@ def _count_and_weight_visible_structures(visibility_raster_path_list, weights,
for block_data in pygeoprocessing.iterblocks((clipped_dem_path, 1),
offset_only=True):
dem_block = dem_band.ReadAsArray(**block_data)
valid_mask = ~utils.array_equals_nodata(dem_block, dem_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(dem_block, dem_nodata)
visibility_sum = numpy.empty(dem_block.shape, dtype=numpy.float32)
visibility_sum[:] = target_nodata
@@ -1055,7 +1055,7 @@ def _calculate_visual_quality(source_raster_path, working_dir, target_path):
"""Assign zeros to nodata, excluding them from percentile calc."""
valid_mask = ~numpy.isclose(valuation_matrix, 0.0)
if raster_nodata is not None:
valid_mask &= ~utils.array_equals_nodata(
valid_mask &= ~pygeoprocessing.array_equals_nodata(
valuation_matrix, raster_nodata)
visual_quality = numpy.empty(valuation_matrix.shape,
dtype=numpy.float64)

View File

@@ -922,10 +922,10 @@ def _calculate_what_drains_to_stream(
"""
drains_to_stream = numpy.full(
flow_dir_mfd.shape, _BYTE_NODATA, dtype=numpy.uint8)
valid_flow_dir = ~utils.array_equals_nodata(
valid_flow_dir = ~pygeoprocessing.array_equals_nodata(
flow_dir_mfd, flow_dir_mfd_nodata)
valid_dist_to_channel = (
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
dist_to_channel, dist_to_channel_nodata) &
valid_flow_dir)
@@ -1026,8 +1026,8 @@ def _calculate_ls_factor(
# avg aspect intermediate output should always have a defined
# nodata value from pygeoprocessing
valid_mask = (
~utils.array_equals_nodata(percent_slope, slope_nodata) &
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(percent_slope, slope_nodata) &
~pygeoprocessing.array_equals_nodata(
flow_accumulation, flow_accumulation_nodata))
result = numpy.empty(valid_mask.shape, dtype=numpy.float32)
result[:] = _TARGET_NODATA
@@ -1141,13 +1141,13 @@ def _calculate_rkls(
"""
rkls = numpy.empty(ls_factor.shape, dtype=numpy.float32)
nodata_mask = (
~utils.array_equals_nodata(ls_factor, _TARGET_NODATA) &
~utils.array_equals_nodata(stream, stream_nodata))
~pygeoprocessing.array_equals_nodata(ls_factor, _TARGET_NODATA) &
~pygeoprocessing.array_equals_nodata(stream, stream_nodata))
if erosivity_nodata is not None:
nodata_mask &= ~utils.array_equals_nodata(
nodata_mask &= ~pygeoprocessing.array_equals_nodata(
erosivity, erosivity_nodata)
if erodibility_nodata is not None:
nodata_mask &= ~utils.array_equals_nodata(
nodata_mask &= ~pygeoprocessing.array_equals_nodata(
erodibility, erodibility_nodata)
valid_mask = nodata_mask & (stream == 0)
@@ -1364,8 +1364,8 @@ def _calculate_ic(d_up_path, d_dn_path, out_ic_factor_path):
def ic_op(d_up, d_dn):
"""Calculate IC factor."""
valid_mask = (
~utils.array_equals_nodata(d_up, _TARGET_NODATA) &
~utils.array_equals_nodata(d_dn, d_dn_nodata) & (d_dn != 0) &
~pygeoprocessing.array_equals_nodata(d_up, _TARGET_NODATA) &
~pygeoprocessing.array_equals_nodata(d_dn, d_dn_nodata) & (d_dn != 0) &
(d_up != 0))
ic_array = numpy.empty(valid_mask.shape, dtype=numpy.float32)
ic_array[:] = _IC_NODATA
@@ -1384,7 +1384,7 @@ def _calculate_sdr(
def sdr_op(ic_factor, stream):
"""Calculate SDR factor."""
valid_mask = (
~utils.array_equals_nodata(ic_factor, _IC_NODATA) & (stream != 1))
~pygeoprocessing.array_equals_nodata(ic_factor, _IC_NODATA) & (stream != 1))
result = numpy.empty(valid_mask.shape, dtype=numpy.float32)
result[:] = _TARGET_NODATA
result[valid_mask] = (
@@ -1414,8 +1414,8 @@ def _calculate_e_prime(usle_path, sdr_path, stream_path, target_e_prime):
def e_prime_op(usle, sdr, streams):
"""Wash that does not reach stream."""
valid_mask = (
~utils.array_equals_nodata(usle, _TARGET_NODATA) &
~utils.array_equals_nodata(sdr, _TARGET_NODATA))
~pygeoprocessing.array_equals_nodata(usle, _TARGET_NODATA) &
~pygeoprocessing.array_equals_nodata(sdr, _TARGET_NODATA))
result = numpy.empty(valid_mask.shape, dtype=numpy.float32)
result[:] = _TARGET_NODATA
result[valid_mask] = usle[valid_mask] * (1-sdr[valid_mask])

View File

@@ -982,7 +982,7 @@ def _calculate_vri(l_path, target_vri_path):
for _, block in pygeoprocessing.iterblocks((l_path, 1)):
valid_mask = (
~utils.array_equals_nodata(block, l_nodata) &
~pygeoprocessing.array_equals_nodata(block, l_nodata) &
(~numpy.isinf(block)))
qb_sum += numpy.sum(block[valid_mask])
qb_valid_count += numpy.count_nonzero(valid_mask)
@@ -993,7 +993,7 @@ def _calculate_vri(l_path, target_vri_path):
result = numpy.empty_like(li_array)
result[:] = li_nodata
if qb_sum > 0:
valid_mask = ~utils.array_equals_nodata(li_array, li_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(li_array, li_nodata)
try:
result[valid_mask] = li_array[valid_mask] / qb_sum
except RuntimeWarning:
@@ -1044,8 +1044,8 @@ def _calculate_monthly_quick_flow(precip_path, n_events_path, stream_path,
Returns:
quick flow (numpy.array)
"""
valid_p_mask = ~utils.array_equals_nodata(p_im, p_nodata)
valid_n_mask = ~utils.array_equals_nodata(n_m, n_nodata)
valid_p_mask = ~pygeoprocessing.array_equals_nodata(p_im, p_nodata)
valid_n_mask = ~pygeoprocessing.array_equals_nodata(n_m, n_nodata)
# precip mask: both p_im and n_m are defined and greater than 0
precip_mask = valid_p_mask & valid_n_mask & (p_im > 0) & (n_m > 0)
stream_mask = stream == 1
@@ -1054,8 +1054,8 @@ def _calculate_monthly_quick_flow(precip_path, n_events_path, stream_path,
valid_mask = (
valid_p_mask &
valid_n_mask &
~utils.array_equals_nodata(stream, stream_nodata) &
~utils.array_equals_nodata(s_i, si_nodata))
~pygeoprocessing.array_equals_nodata(stream, stream_nodata) &
~pygeoprocessing.array_equals_nodata(s_i, si_nodata))
# QF is defined in terms of three cases:
#
@@ -1274,7 +1274,7 @@ def _calculate_si_raster(cn_path, stream_path, si_path):
def si_op(ci_factor, stream_mask):
"""Calculate si factor."""
valid_mask = (
~utils.array_equals_nodata(ci_factor, cn_nodata) &
~pygeoprocessing.array_equals_nodata(ci_factor, cn_nodata) &
(ci_factor > 0))
si_array = numpy.empty(ci_factor.shape)
si_array[:] = TARGET_NODATA

View File

@@ -890,9 +890,9 @@ def volume_op(ratio_array, precip_array, precip_nodata, pixel_area):
"""
volume_array = numpy.full(ratio_array.shape, FLOAT_NODATA,
dtype=numpy.float32)
valid_mask = ~utils.array_equals_nodata(ratio_array, FLOAT_NODATA)
valid_mask = ~pygeoprocessing.array_equals_nodata(ratio_array, FLOAT_NODATA)
if precip_nodata is not None:
valid_mask &= ~utils.array_equals_nodata(precip_array, precip_nodata)
valid_mask &= ~pygeoprocessing.array_equals_nodata(precip_array, precip_nodata)
# precipitation (mm/yr) * pixel area (m^2) *
# 0.001 (m/mm) * ratio = volume (m^3/yr)
@@ -943,9 +943,9 @@ def pollutant_load_op(lulc_array, lulc_nodata, volume_array, sorted_lucodes,
"""
load_array = numpy.full(
lulc_array.shape, FLOAT_NODATA, dtype=numpy.float32)
valid_mask = ~utils.array_equals_nodata(volume_array, FLOAT_NODATA)
valid_mask = ~pygeoprocessing.array_equals_nodata(volume_array, FLOAT_NODATA)
if lulc_nodata is not None:
valid_mask &= ~utils.array_equals_nodata(lulc_array, lulc_nodata)
valid_mask &= ~pygeoprocessing.array_equals_nodata(lulc_array, lulc_nodata)
# bin each value in the LULC array such that
# lulc_array[i,j] == sorted_lucodes[lulc_index[i,j]]. thus,
@@ -975,7 +975,7 @@ def retention_value_op(retention_volume_array, replacement_cost):
"""
value_array = numpy.full(retention_volume_array.shape, FLOAT_NODATA,
dtype=numpy.float32)
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
retention_volume_array, FLOAT_NODATA)
# retention (m^3/yr) * replacement cost ($/m^3) = retention value ($/yr)

View File

@@ -722,7 +722,7 @@ def _flood_vol_op(
"""
result = numpy.empty(q_pi_array.shape, dtype=numpy.float32)
result[:] = target_nodata
valid_mask = ~utils.array_equals_nodata(q_pi_array, q_pi_nodata)
valid_mask = ~pygeoprocessing.array_equals_nodata(q_pi_array, q_pi_nodata)
# 0.001 converts mm (quickflow) to m (pixel area units)
result[valid_mask] = (
q_pi_array[valid_mask] * pixel_area * 0.001)
@@ -747,7 +747,7 @@ def _runoff_retention_vol_op(
"""
result = numpy.empty(runoff_retention_array.shape, dtype=numpy.float32)
result[:] = target_nodata
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
runoff_retention_array, runoff_retention_nodata)
# the 1e-3 converts the mm of p_value to meters.
result[valid_mask] = (
@@ -773,7 +773,7 @@ def _runoff_retention_op(q_pi_array, p_value, q_pi_nodata, result_nodata):
result[:] = result_nodata
valid_mask = numpy.ones(q_pi_array.shape, dtype=bool)
if q_pi_nodata is not None:
valid_mask[:] = ~utils.array_equals_nodata(q_pi_array, q_pi_nodata)
valid_mask[:] = ~pygeoprocessing.array_equals_nodata(q_pi_array, q_pi_nodata)
result[valid_mask] = 1 - (q_pi_array[valid_mask] / p_value)
return result
@@ -798,7 +798,7 @@ def _q_pi_op(p_value, s_max_array, s_max_nodata, result_nodata):
zero_mask = (p_value <= lam * s_max_array)
non_nodata_mask = numpy.ones(s_max_array.shape, dtype=bool)
if s_max_nodata is not None:
non_nodata_mask[:] = ~utils.array_equals_nodata(
non_nodata_mask[:] = ~pygeoprocessing.array_equals_nodata(
s_max_array, s_max_nodata)
# valid if not nodata and not going to be set to 0.
@@ -828,7 +828,7 @@ def _s_max_op(cn_array, cn_nodata, result_nodata):
zero_mask = cn_array == 0
valid_mask = ~zero_mask
if cn_nodata is not None:
valid_mask[:] &= ~utils.array_equals_nodata(cn_array, cn_nodata)
valid_mask[:] &= ~pygeoprocessing.array_equals_nodata(cn_array, cn_nodata)
result[valid_mask] = 25400 / cn_array[valid_mask] - 254
result[zero_mask] = 0
return result
@@ -856,10 +856,10 @@ def _lu_to_cn_op(
result[:] = cn_nodata
valid_mask = numpy.ones(lucode_array.shape, dtype=bool)
if lucode_nodata is not None:
valid_mask[:] &= ~utils.array_equals_nodata(
valid_mask[:] &= ~pygeoprocessing.array_equals_nodata(
lucode_array, lucode_nodata)
if soil_type_nodata is not None:
valid_mask[:] &= ~utils.array_equals_nodata(
valid_mask[:] &= ~pygeoprocessing.array_equals_nodata(
soil_type_array, soil_type_nodata)
# this is an array where each column represents a valid landcover

View File

@@ -1662,8 +1662,8 @@ def _weighted_sum(raster_path_list, weight_raster_list, target_path):
for source_array, weight_array, source_nodata, weight_nodata in zip(
pixel_arrays, weight_arrays, raster_nodata_list, weight_nodata_list):
valid_pixels = (
~utils.array_equals_nodata(source_array, source_nodata) &
~utils.array_equals_nodata(weight_array, weight_nodata))
~pygeoprocessing.array_equals_nodata(source_array, source_nodata) &
~pygeoprocessing.array_equals_nodata(weight_array, weight_nodata))
touched_pixels |= valid_pixels
target_array[valid_pixels] += (
source_array[valid_pixels] * weight_array[valid_pixels])
@@ -1719,9 +1719,9 @@ def _reclassify_and_multiply(
supply_block = supply_band.ReadAsArray(**block_info)
valid_mask = (
~utils.array_equals_nodata(
~pygeoprocessing.array_equals_nodata(
pop_group_proportion_block, pop_group_nodata) &
~utils.array_equals_nodata(supply_block, supply_nodata))
~pygeoprocessing.array_equals_nodata(supply_block, supply_nodata))
target_block = numpy.full(supply_block.shape, FLOAT32_NODATA,
dtype=numpy.float32)
target_block[valid_mask] = (
@@ -2234,7 +2234,7 @@ def _convolve_and_set_lower_bound(
target_nodata = target_band.GetNoDataValue()
for block_data, block in pygeoprocessing.iterblocks(
(target_path, 1)):
valid_pixels = ~utils.array_equals_nodata(block, target_nodata)
valid_pixels = ~pygeoprocessing.array_equals_nodata(block, target_nodata)
block[(block < 0) & valid_pixels] = 0
target_band.WriteArray(
block, xoff=block_data['xoff'], yoff=block_data['yoff'])
@@ -2601,7 +2601,7 @@ def _create_valid_pixels_nodata_mask(raster_list, target_mask_path):
def _create_mask(*raster_arrays):
valid_pixels_mask = numpy.ones(raster_arrays[0].shape, dtype=bool)
for nodata, array in zip(nodatas, raster_arrays):
valid_pixels_mask &= ~utils.array_equals_nodata(array, nodata)
valid_pixels_mask &= ~pygeoprocessing.array_equals_nodata(array, nodata)
return valid_pixels_mask

View File

@@ -1003,30 +1003,6 @@ def reclassify_raster(
raise ValueError(error_message)
def array_equals_nodata(array, nodata):
    """Check for the presence of ``nodata`` values in ``array``.

    The comparison supports ``numpy.nan`` and unset (``None``) nodata values.

    Args:
        array (numpy array): the array to mask for nodata values.
        nodata (number): the nodata value to check for. Supports ``numpy.nan``.

    Returns:
        A boolean numpy array with values of 1 where ``array`` is equal to
        ``nodata`` and 0 otherwise.
    """
    # If nodata is undefined, nothing matches nodata.
    if nodata is None:
        return numpy.zeros(array.shape, dtype=bool)

    # comparing an integer array against numpy.nan works correctly and is
    # faster than using numpy.isclose().
    if numpy.issubdtype(array.dtype, numpy.integer):
        return array == nodata
    return numpy.isclose(array, nodata, equal_nan=True)
def matches_format_string(test_string, format_string):
"""Assert that a given string matches a given format string.

View File

@@ -1719,7 +1719,7 @@ def _create_percentile_rasters(base_raster_path, target_raster_path,
def _mask_below_start_value(array):
valid_mask = (
~utils.array_equals_nodata(array, base_nodata) &
~pygeoprocessing.array_equals_nodata(array, base_nodata) &
(array >= float(start_value)))
result = numpy.empty_like(array)
result[:] = base_nodata

View File

@@ -1445,7 +1445,7 @@ def _calculate_npv_levelized_rasters(
pygeoprocessing.iterblocks((base_dist_raster_path, 1))):
target_arr_shape = harvest_block_data.shape
target_nodata_mask = utils.array_equals_nodata(
target_nodata_mask = pygeoprocessing.array_equals_nodata(
harvest_block_data, _TARGET_NODATA)
# Total cable distance converted to kilometers
@@ -1624,7 +1624,7 @@ def _depth_op(bath, min_depth, max_depth):
out_array = numpy.full(
bath.shape, _TARGET_NODATA, dtype=numpy.float32)
valid_pixels_mask = ((bath >= max_depth) & (bath <= min_depth) &
~utils.array_equals_nodata(bath, _TARGET_NODATA))
~pygeoprocessing.array_equals_nodata(bath, _TARGET_NODATA))
out_array[
valid_pixels_mask] = bath[valid_pixels_mask]
return out_array
@@ -1644,7 +1644,7 @@ def _add_avg_dist_op(tmp_dist, avg_grid_distance):
"""
out_array = numpy.full(
tmp_dist.shape, _TARGET_NODATA, dtype=numpy.float32)
valid_pixels_mask = ~utils.array_equals_nodata(tmp_dist, _TARGET_NODATA)
valid_pixels_mask = ~pygeoprocessing.array_equals_nodata(tmp_dist, _TARGET_NODATA)
out_array[valid_pixels_mask] = (
tmp_dist[valid_pixels_mask] + avg_grid_distance)
return out_array
@@ -1711,7 +1711,7 @@ def _mask_out_depth_dist(*rasters):
out_array = numpy.full(rasters[0].shape, _TARGET_NODATA, dtype=numpy.float32)
nodata_mask = numpy.full(rasters[0].shape, False, dtype=bool)
for array in rasters:
nodata_mask = nodata_mask | utils.array_equals_nodata(
nodata_mask = nodata_mask | pygeoprocessing.array_equals_nodata(
array, _TARGET_NODATA)
out_array[~nodata_mask] = rasters[0][~nodata_mask]
return out_array
@@ -1731,7 +1731,7 @@ def _calculate_carbon_op(harvested_arr, carbon_coef):
"""
out_array = numpy.full(
harvested_arr.shape, _TARGET_NODATA, dtype=numpy.float32)
valid_pixels_mask = ~utils.array_equals_nodata(
valid_pixels_mask = ~pygeoprocessing.array_equals_nodata(
harvested_arr, _TARGET_NODATA)
# The energy value converted from MWhr/yr (Mega Watt hours as output
@@ -1877,7 +1877,7 @@ def _mask_by_distance(base_raster_path, min_dist, max_dist, out_nodata,
"""Mask distance values by min/max values."""
out_array = numpy.full(dist_arr.shape, out_nodata, dtype=numpy.float32)
valid_pixels_mask = (
~utils.array_equals_nodata(dist_arr, raster_nodata) &
~pygeoprocessing.array_equals_nodata(dist_arr, raster_nodata) &
(dist_arr >= min_dist) & (dist_arr <= max_dist))
out_array[
valid_pixels_mask] = dist_arr[valid_pixels_mask]

View File

@@ -476,7 +476,7 @@ class UNATests(unittest.TestCase):
accessible_urban_nature_array = pygeoprocessing.raster_to_numpy_array(
os.path.join(args['workspace_dir'], 'output',
'accessible_urban_nature_suffix.tif'))
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
accessible_urban_nature_array, urban_nature_access.FLOAT32_NODATA)
valid_pixels = accessible_urban_nature_array[valid_mask]
self.assertAlmostEqual(numpy.sum(valid_pixels), 6221004.41259766)
@@ -595,7 +595,7 @@ class UNATests(unittest.TestCase):
def _read_and_sum_raster(path):
array = pygeoprocessing.raster_to_numpy_array(path)
nodata = pygeoprocessing.get_raster_info(path)['nodata'][0]
return numpy.sum(array[~utils.array_equals_nodata(array, nodata)])
return numpy.sum(array[~pygeoprocessing.array_equals_nodata(array, nodata)])
intermediate_dir = os.path.join(args['workspace_dir'], 'intermediate')
for (supply_type, supply_field), fieldname in itertools.product(
@@ -644,7 +644,7 @@ class UNATests(unittest.TestCase):
accessible_urban_nature_array = (
pygeoprocessing.raster_to_numpy_array(path))
valid_mask = ~utils.array_equals_nodata(
valid_mask = ~pygeoprocessing.array_equals_nodata(
accessible_urban_nature_array,
urban_nature_access.FLOAT32_NODATA)
valid_pixels = accessible_urban_nature_array[valid_mask]
@@ -810,7 +810,7 @@ class UNATests(unittest.TestCase):
os.path.join(args['workspace_dir'], 'output',
f'urban_nature_demand_{suffix}.tif'))
nodata = urban_nature_access.FLOAT32_NODATA
valid_pixels = ~utils.array_equals_nodata(population, nodata)
valid_pixels = ~pygeoprocessing.array_equals_nodata(population, nodata)
numpy.testing.assert_allclose(
(population[valid_pixels].sum() *
float(args['urban_nature_demand'])),

View File

@@ -1754,44 +1754,3 @@ class ReclassifyRasterOpTests(unittest.TestCase):
" the LULC raster but not the table are: [3].")
self.assertTrue(
expected_message in str(context.exception), str(context.exception))
class ArrayEqualsNodataTests(unittest.TestCase):
    """Tests for natcap.invest.utils.array_equals_nodata."""

    def test_integer_array(self):
        """Utils: test integer array is returned as expected."""
        from natcap.invest import utils

        nodata_values = [9, 9.0]
        int_array = numpy.array(
            [[4, 2, 9], [1, 9, 3], [9, 6, 1]], dtype=numpy.int16)
        expected_array = numpy.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])
        for nodata in nodata_values:
            result_array = utils.array_equals_nodata(int_array, nodata)
            numpy.testing.assert_array_equal(result_array, expected_array)

    def test_nan_nodata_array(self):
        """Utils: test array with numpy.nan nodata values."""
        from natcap.invest import utils

        array = numpy.array(
            [[4, 2, numpy.nan], [1, numpy.nan, 3], [numpy.nan, 6, 1]])
        expected_array = numpy.array([[0, 0, 1], [0, 1, 0], [1, 0, 0]])
        result_array = utils.array_equals_nodata(array, numpy.nan)
        numpy.testing.assert_array_equal(result_array, expected_array)

    def test_none_nodata(self):
        """Utils: if nodata is undefined (None), everything is valid."""
        from natcap.invest import utils

        array = numpy.array(
            [[4, 2, numpy.nan], [1, numpy.nan, 3], [numpy.nan, 6, 1]])
        result_array = utils.array_equals_nodata(array, None)
        numpy.testing.assert_array_equal(
            result_array, numpy.zeros(array.shape, dtype=bool))
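The deleted tests above covered the integer, nan, and None nodata cases of the removed helper. If equivalent coverage is wanted against the upstream function, a test along these lines could stand in for them (a sketch only; the test name and placement are hypothetical, and it assumes pygeoprocessing.array_equals_nodata keeps the None-means-everything-valid behavior):

import numpy
import pygeoprocessing

def test_none_nodata_pygeoprocessing():
    array = numpy.array([[4, 2, numpy.nan], [1, numpy.nan, 3]])
    result = pygeoprocessing.array_equals_nodata(array, None)
    numpy.testing.assert_array_equal(
        result, numpy.zeros(array.shape, dtype=bool))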