rename spec_utils to spec

This commit is contained in:
Emily Soth 2025-05-05 18:35:56 -07:00
parent fddbac6080
commit bd8e0eb435
42 changed files with 608 additions and 608 deletions

View File

@ -11,7 +11,7 @@ from osgeo import gdal
from osgeo import ogr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -104,7 +104,7 @@ WATERSHED_OUTPUT_FIELDS = {
**VALUATION_OUTPUT_FIELDS
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "annual_water_yield",
"model_title": gettext("Annual Water Yield"),
"userguide": "annual_water_yield.html",
@ -130,13 +130,13 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": False,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": spec_utils.LULC['about'] + " " + gettext(
"about": spec.LULC['about'] + " " + gettext(
"All values in this raster must have corresponding entries "
"in the Biophysical Table.")
},
@ -154,7 +154,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("root restricting layer depth")
},
"precipitation_path": {
**spec_utils.PRECIP,
**spec.PRECIP,
"projected": True
},
"pawc_path": {
@ -168,7 +168,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("plant available water content")
},
"eto_path": {
**spec_utils.ET0,
**spec.ET0,
"projected": True
},
"watersheds_path": {
@ -180,7 +180,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext("Unique identifier for each watershed.")
}
},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext(
"Map of watershed boundaries, such that each watershed drains "
"to a point of interest where hydropower production will be "
@ -196,7 +196,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext("Unique identifier for each subwatershed.")
}
},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": False,
"about": gettext(
"Map of subwatershed boundaries within each watershed in "
@ -206,7 +206,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"biophysical_table_path": {
"type": "csv",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"lulc_veg": {
"type": "integer",
"about": gettext(
@ -345,7 +345,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"contents": {
"watershed_results_wyield.shp": {
"fields": {**WATERSHED_OUTPUT_FIELDS},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": "Shapefile containing biophysical output values per watershed."
},
"watershed_results_wyield.csv": {
@ -355,7 +355,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"subwatershed_results_wyield.shp": {
"fields": {**SUBWATERSHED_OUTPUT_FIELDS},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": "Shapefile containing biophysical output values per subwatershed."
},
"subwatershed_results_wyield.csv": {
@ -444,7 +444,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_dir": spec_utils.TASKGRAPH_DIR
"taskgraph_dir": spec.TASKGRAPH_DIR
}
})

View File

@ -12,7 +12,7 @@ import taskgraph
from . import validation
from . import utils
from . import spec_utils
from . import spec
from .unit_registry import u
from . import gettext
@ -38,7 +38,7 @@ CARBON_OUTPUTS = {
]
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "carbon",
"model_title": gettext("Carbon Storage and Sequestration"),
"userguide": "carbonstorage.html",
@ -57,11 +57,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"spatial_keys": ["lulc_bas_path", "lulc_alt_path"],
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_bas_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"projection_units": u.meter,
"about": gettext(
@ -80,7 +80,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("calculate sequestration")
},
"lulc_alt_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"projection_units": u.meter,
"required": "calc_sequestration",
@ -96,7 +96,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"carbon_pools_path": {
"type": "csv",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"c_above": {
"type": "number",
"units": u.metric_ton/u.hectare,
@ -226,7 +226,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
**CARBON_OUTPUTS
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -15,7 +15,7 @@ import warnings
import natcap.invest
from natcap.invest import datastack
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import ui_server
from natcap.invest import utils
from pygeoprocessing.geoprocessing_core import GDALUseExceptions
@ -23,7 +23,7 @@ with GDALUseExceptions():
import natcap.invest
from natcap.invest import datastack
from natcap.invest import set_locale
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import ui_server
from natcap.invest import utils
from natcap.invest import models
@ -492,7 +492,7 @@ def main(user_args=None):
try:
# If there's an exception from creating metadata
# I don't think we want to indicate a model failure
spec_utils.generate_metadata(model_module, parsed_datastack.args)
spec.generate_metadata(model_module, parsed_datastack.args)
except Exception as exc:
LOGGER.warning(
'Something went wrong while generating metadata', exc_info=exc)

View File

@ -104,7 +104,7 @@ import taskgraph
from osgeo import gdal
from .. import utils
from .. import spec_utils
from .. import spec
from ..unit_registry import u
from .. import validation
from .. import gettext
@ -251,7 +251,7 @@ BIOPHYSICAL_TABLE_COLUMNS = {
"Annual rate of CO2E accumulation in the litter pool.")}
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "coastal_blue_carbon",
"model_title": gettext("Coastal Blue Carbon"),
"userguide": "coastal_blue_carbon.html",
@ -265,9 +265,9 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"landcover_snapshot_csv": {
"type": "csv",
"index_col": "snapshot_year",
@ -538,7 +538,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -9,7 +9,7 @@ import taskgraph
from osgeo import gdal
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..unit_registry import u
@ -17,7 +17,7 @@ from . import coastal_blue_carbon
LOGGER = logging.getLogger(__name__)
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "coastal_blue_carbon_preprocessor",
"model_title": gettext("Coastal Blue Carbon Preprocessor"),
"userguide": "coastal_blue_carbon.html",
@ -30,9 +30,9 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_lookup_table_path": {
"name": gettext("LULC lookup table"),
"type": "csv",
@ -138,7 +138,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"to match all the other LULC maps."),
"bands": {1: {"type": "integer"}}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -26,7 +26,7 @@ from shapely.geometry.base import BaseMultipartGeometry
from shapely.strtree import STRtree
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -143,7 +143,7 @@ def get_vector_colnames(vector_path):
return []
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "coastal_vulnerability",
"model_title": gettext("Coastal Vulnerability"),
"userguide": "coastal_vulnerability.html",
@ -176,11 +176,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"aoi_vector_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"projection_units": u.meter,
"about": gettext("Map of the region over which to run the model.")
@ -196,7 +196,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"landmass_vector_path": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"about": gettext(
"Map of all landmasses in and around the region of interest. "
"It is not recommended to clip this landmass to the AOI "
@ -209,7 +209,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"wwiii_vector_path": {
"type": "vector",
"fields": WWIII_FIELDS,
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext(
"Map of gridded wind and wave data that represent storm "
"conditions. This global dataset is provided with the InVEST "
@ -248,14 +248,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"shelf_contour_vector_path": {
"type": "vector",
"fields": {},
"geometries": spec_utils.LINES,
"geometries": spec.LINES,
"about": gettext(
"Map of the edges of the continental shelf or other locally "
"relevant bathymetry contour."),
"name": gettext("continental shelf contour")
},
"dem_path": {
**spec_utils.DEM,
**spec.DEM,
"bands": {1: {
"type": "number",
"units": u.other # any unit of length is ok
@ -326,7 +326,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext("Relative exposure of the segment of coastline.")
}
},
"geometries": spec_utils.LINES,
"geometries": spec.LINES,
"required": False,
"about": gettext("Map of relative exposure of each segment of coastline."),
"name": gettext("geomorphology")
@ -380,7 +380,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"units": u.none
}
},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"required": False,
"about": gettext(
"Map of sea level rise rates or amounts. May be any sea level "
@ -402,7 +402,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"outputs": {
"coastal_exposure.gpkg": {
"about": "This point vector file contains the final outputs of the model. The points are created based on the input model resolution, landmass, and AOI.",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": FINAL_OUTPUT_FIELDS
},
"coastal_exposure.csv": {
@ -416,7 +416,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"intermediate_exposure.gpkg": {
"about": (
"Shore points with associated exposure variables"),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
"shore_id": {
"type": "integer",
@ -452,7 +452,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"geomorphology_protected.gpkg": {
"about": (
"Geomorphology vector reprojected to match the AOI"),
"geometries": spec_utils.LINES,
"geometries": spec.LINES,
"fields": {
"rank": {
"type": "option_string",
@ -477,7 +477,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"of either the geomorphology or landmass polygon "
"inputs. Editing the geometry of one or both in "
"GIS could help resolve this."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
"shore_id": {
"type": "integer",
@ -581,7 +581,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"clipped_projected_landmass.gpkg": {
"about": "Clipped and reprojected landmass map",
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
},
"landmass_line_index.pickle": {
"about": "Pickled landmass index"
@ -593,7 +593,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"shore_points.gpkg": {
"about": "Map of shore points",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
"shore_id": {
"type": "integer",
@ -604,7 +604,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"tmp_clipped_landmass.gpkg": {
"about": "Clipped landmass map",
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
}
}
},
@ -627,7 +627,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"fetch_points.gpkg": {
"about": "Shore points with added fetch ray data",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
**WWIII_FIELDS,
"fdist_[SECTOR]": {
@ -648,7 +648,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"fetch_rays.gpkg": {
"about": (
"Map of fetch rays around each shore point."),
"geometries": spec_utils.LINESTRING,
"geometries": spec.LINESTRING,
"fields": {
"fetch_dist": {
"about": "Fetch distance along the ray",
@ -670,7 +670,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"wave_energies.gpkg": {
"about": "Shore points with associated wave energy data",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
"E_ocean": {
"about": (
@ -727,14 +727,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"wind.pickle": {"about": "Pickled wind data"},
"wwiii_shore_points.gpkg": {
"about": "WaveWatch 3 data interpolated to shore points",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": WWIII_FIELDS
}
}
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})
@ -3515,7 +3515,7 @@ def validate(args, limit_to=None):
'slr_field' in sufficient_keys):
fieldnames = validation.load_fields_from_vector(
args['slr_vector_path'])
error_msg = spec_utils.OptionStringInput(
error_msg = spec.OptionStringInput(
options=fieldnames).validate(args['slr_field'])
if error_msg:
validation_warnings.append((['slr_field'], error_msg))

View File

@ -12,7 +12,7 @@ from osgeo import gdal
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .crop_production_regression import NUTRIENTS
@ -240,7 +240,7 @@ nutrient_units = {
"vitk": u.microgram/u.hectogram, # vitamin K
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "crop_production_percentile",
"model_title": gettext("Crop Production: Percentile"),
"userguide": "crop_production.html",
@ -260,11 +260,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"landcover_raster_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"projection_units": u.meter
},
@ -285,7 +285,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("LULC to Crop Table")
},
"aggregate_polygon_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"required": False
},
@ -510,7 +510,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -10,7 +10,7 @@ from osgeo import gdal
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -66,7 +66,7 @@ NUTRIENTS = [
("vitk", "vitamin K", u.microgram/u.hectogram)
]
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "crop_production_regression",
"model_title": gettext("Crop Production: Regression"),
"userguide": "crop_production.html",
@ -83,11 +83,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"landcover_raster_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"projection_units": u.meter
},
@ -127,7 +127,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("fertilization rate table")
},
"aggregate_polygon_path": {
**spec_utils.AOI,
**spec.AOI,
"required": False
},
"model_data_path": {
@ -276,7 +276,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"contents": {
"aggregate_vector.shp": {
"about": "Copy of input AOI vector",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"clipped_[CROP]_climate_bin_map.tif": {
@ -329,7 +329,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -37,7 +37,7 @@ from osgeo import gdal
from . import utils
from . import validation
from . import models
from . import spec_utils
from . import spec
try:
from . import __version__
@ -200,10 +200,10 @@ def build_datastack_archive(args, model_id, datastack_path):
files_found = {}
LOGGER.debug(f'Keys: {sorted(args.keys())}')
spatial_types = {spec_utils.SingleBandRasterInput, spec_utils.VectorInput,
spec_utils.RasterOrVectorInput}
spatial_types = {spec.SingleBandRasterInput, spec.VectorInput,
spec.RasterOrVectorInput}
file_based_types = spatial_types.union({
spec_utils.CSVInput, spec_utils.FileInput, spec_utils.DirectoryInput})
spec.CSVInput, spec.FileInput, spec.DirectoryInput})
rewritten_args = {}
for key in args:
# Allow the model to override specific arguments in datastack archive
@ -260,7 +260,7 @@ def build_datastack_archive(args, model_id, datastack_path):
rewritten_args[key] = files_found[source_path]
continue
if input_spec.__class__ is spec_utils.CSVInput:
if input_spec.__class__ is spec.CSVInput:
# check the CSV for columns that may be spatial.
# But also, the columns specification might not be listed, so don't
# require that 'columns' exists in the MODEL_SPEC.
@ -342,7 +342,7 @@ def build_datastack_archive(args, model_id, datastack_path):
target_arg_value = target_csv_path
files_found[source_path] = target_arg_value
elif input_spec.__class__ is spec_utils.FileInput:
elif input_spec.__class__ is spec.FileInput:
target_filepath = os.path.join(
data_dir, f'{key}_file')
shutil.copyfile(source_path, target_filepath)
@ -351,7 +351,7 @@ def build_datastack_archive(args, model_id, datastack_path):
target_arg_value = target_filepath
files_found[source_path] = target_arg_value
elif input_spec.__class__ is spec_utils.DirectoryInput:
elif input_spec.__class__ is spec.DirectoryInput:
# copy the whole folder
target_directory = os.path.join(data_dir, f'{key}_directory')
os.makedirs(target_directory)

View File

@ -15,7 +15,7 @@ from osgeo import ogr
from osgeo import osr
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..unit_registry import u
@ -23,7 +23,7 @@ from . import delineateit_core
LOGGER = logging.getLogger(__name__)
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "delineateit",
"model_title": gettext("DelineateIt"),
"userguide": "delineateit.html",
@ -41,11 +41,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"dem_path": {
**spec_utils.DEM,
**spec.DEM,
"projected": True
},
"detect_pour_points": {
@ -60,7 +60,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"outlet_vector_path": {
"type": "vector",
"fields": {},
"geometries": spec_utils.ALL_GEOMS,
"geometries": spec.ALL_GEOMS,
"required": "not detect_pour_points",
"allowed": "not detect_pour_points",
"about": gettext(
@ -82,11 +82,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("snap points to the nearest stream")
},
"flow_threshold": {
**spec_utils.THRESHOLD_FLOW_ACCUMULATION,
**spec.THRESHOLD_FLOW_ACCUMULATION,
"required": "snap_points",
"allowed": "snap_points",
"about": gettext(
spec_utils.THRESHOLD_FLOW_ACCUMULATION["about"] +
spec.THRESHOLD_FLOW_ACCUMULATION["about"] +
" Required if Snap Points is selected."),
},
"snap_distance": {
@ -113,18 +113,18 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
},
"outputs": {
"filled_dem.tif": spec_utils.FILLED_DEM,
"flow_direction.tif": spec_utils.FLOW_DIRECTION_D8,
"flow_accumulation.tif": spec_utils.FLOW_ACCUMULATION,
"filled_dem.tif": spec.FILLED_DEM,
"flow_direction.tif": spec.FLOW_DIRECTION_D8,
"flow_accumulation.tif": spec.FLOW_ACCUMULATION,
"preprocessed_geometries.gpkg": {
"about": (
"A vector containing only those geometries that the model can "
"verify are valid. The geometries appearing in this vector "
"will be the ones passed to watershed delineation."),
"geometries": spec_utils.ALL_GEOMS,
"geometries": spec.ALL_GEOMS,
"fields": {}
},
"streams.tif": spec_utils.STREAM,
"streams.tif": spec.STREAM,
"snapped_outlets.gpkg": {
"about": (
"A vector that indicates where outlet points (point "
@ -132,7 +132,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Threshold Flow Accumulation and Pixel Distance to Snap "
"Outlet Points. Any non-point geometries will also have been "
"copied over to this vector, but will not have been altered."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {}
},
"watersheds.gpkg": {
@ -140,16 +140,16 @@ MODEL_SPEC = spec_utils.build_model_spec({
"A vector defining the areas that are upstream from the "
"snapped outlet points, where upstream area is defined by the "
"D8 flow algorithm implementation in PyGeoprocessing."),
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {}
},
"pour_points.gpkg": {
"about": (
"Points where water flows off the defined area of the map."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -18,7 +18,7 @@ from osgeo import gdal
from osgeo import ogr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -31,7 +31,7 @@ DISTANCE_UPPER_BOUND = 500e3
# helpful to have a global nodata defined for the whole model
NODATA_VALUE = -1
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "forest_carbon_edge_effect",
"model_title": gettext("Forest Carbon Edge Effect"),
"userguide": "carbon_edge.html",
@ -49,9 +49,9 @@ MODEL_SPEC = spec_utils.build_model_spec({
"spatial_keys": ["aoi_vector_path", "lulc_raster_path"],
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"n_nearest_model_points": {
"expression": "value > 0 and value.is_integer()",
"type": "number",
@ -69,7 +69,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("number of points to average")
},
"aoi_vector_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"required": False
},
@ -77,7 +77,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"is_tropical_forest": {
"type": "boolean",
"about": gettext(
@ -121,8 +121,8 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("biophysical table")
},
"lulc_raster_path": {
**spec_utils.LULC,
"about": spec_utils.LULC['about'] + " " + gettext(
**spec.LULC,
"about": spec.LULC['about'] + " " + gettext(
"All values in this raster must "
"have corresponding entries in the Biophysical Table."),
"projected": True
@ -176,7 +176,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"θ₃ parameter for the regression equation. "
"Used only for the asymptotic model.")}
},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": "compute_forest_edge_effects",
"allowed": "compute_forest_edge_effects",
"about": gettext(
@ -210,7 +210,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"aggregated_carbon_stocks.shp": {
"about": "AOI map with aggregated carbon statistics.",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"c_sum": {
"type": "number",
@ -247,7 +247,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"The regression parameters reprojected to match your "
"study area."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"edge_distance.tif": {
@ -266,7 +266,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -11,7 +11,7 @@ import taskgraph
from osgeo import gdal
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -32,7 +32,7 @@ MISSING_MAX_DIST_MSG = gettext(
"Maximum distance value is missing for threats: {threat_list}.")
MISSING_WEIGHT_MSG = gettext("Weight value is missing for threats: {threat_list}.")
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "habitat_quality",
"model_title": gettext("Habitat Quality"),
"userguide": "habitat_quality.html",
@ -52,11 +52,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_cur_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": gettext(
"Map of LULC at present. All values in this raster must "
@ -64,7 +64,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("current land cover")
},
"lulc_fut_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"required": False,
"about": gettext(
@ -75,7 +75,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("future land cover")
},
"lulc_bas_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"required": False,
"about": gettext(
@ -176,7 +176,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"represents completely accessible.")
}
},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": False,
"about": gettext(
"Map of the relative protection that legal, institutional, "
@ -188,7 +188,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"name": {
"type": "freestyle_string",
"required": False
@ -384,7 +384,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})
# All output rasters besides rarity should be gte to 0. Set nodata accordingly.

View File

@ -17,7 +17,7 @@ from osgeo import ogr
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -48,7 +48,7 @@ _DEFAULT_GTIFF_CREATION_OPTIONS = (
'TILED=YES', 'BIGTIFF=YES', 'COMPRESS=DEFLATE',
'BLOCKXSIZE=256', 'BLOCKYSIZE=256')
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "habitat_risk_assessment",
"model_title": gettext("Habitat Risk Assessment"),
"userguide": "habitat_risk_assessment.html",
@ -66,9 +66,9 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"info_table_path": {
"name": gettext("habitat stressor table"),
"about": gettext("A table describing each habitat and stressor."),
@ -92,7 +92,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"values besides 0 or 1 will be treated as 0.")
}},
"fields": {},
"geometries": spec_utils.ALL_GEOMS,
"geometries": spec.ALL_GEOMS,
"about": gettext(
"Map of where the habitat or stressor exists. For "
"rasters, a pixel value of 1 indicates presence of "
@ -184,7 +184,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
},
"aoi_vector_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"projection_units": u.meter,
"fields": {
@ -297,7 +297,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"Map of habitat-specific risk visualized in gradient "
"color from white to red on a map."),
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {
"Risk Score": {
"type": "integer",
@ -312,7 +312,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"Map of ecosystem risk visualized in gradient "
"color from white to red on a map."),
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {
"Risk Score": {
"type": "integer",
@ -325,7 +325,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"STRESSOR_[STRESSOR].geojson": {
"about": "Map of stressor extent visualized in orange color.",
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {}
},
"SUMMARY_STATISTICS.csv": {
@ -395,7 +395,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"polygonized_[HABITAT/STRESSOR].gpkg": {
"about": "Polygonized habitat or stressor map",
"fields": {},
"geometries": spec_utils.POLYGON
"geometries": spec.POLYGON
},
"reclass_[HABITAT]_[STRESSOR].tif": {
"about": (
@ -419,7 +419,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"were provided in a spatial vector format, it will be "
"reprojected to the AOI projection."),
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
},
"rewritten_[HABITAT/STRESSOR/CRITERIA].tif": {
"about": (
@ -439,11 +439,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"provided are simplified to 1/2 the user-defined "
"raster resolution in order to speed up rasterization."),
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -12,7 +12,7 @@ from osgeo import gdal
from osgeo import ogr
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..sdr import sdr
@ -23,7 +23,7 @@ LOGGER = logging.getLogger(__name__)
MISSING_NUTRIENT_MSG = gettext('Either calc_n or calc_p must be True')
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "ndr",
"model_title": gettext("Nutrient Delivery Ratio"),
"userguide": "ndr.html",
@ -44,17 +44,17 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"dem_path": {
**spec_utils.DEM,
**spec.DEM,
"projected": True
},
"lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": spec_utils.LULC['about'] + " " + gettext(
"about": spec.LULC['about'] + " " + gettext(
"All values in this raster must "
"have corresponding entries in the Biophysical table.")
},
@ -74,7 +74,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"watersheds_path": {
"type": "vector",
"projected": True,
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {},
"about": gettext(
"Map of the boundaries of the watershed(s) over which to "
@ -85,7 +85,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"load_n": {
"type": "number",
"units": u.kilogram/u.hectare/u.year,
@ -158,7 +158,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("calculate nitrogen")
},
"threshold_flow_accumulation": {
**spec_utils.THRESHOLD_FLOW_ACCUMULATION
**spec.THRESHOLD_FLOW_ACCUMULATION
},
"k_param": {
"type": "number",
@ -208,12 +208,12 @@ MODEL_SPEC = spec_utils.build_model_spec({
"retention due to biochemical degradation in soils. Required "
"if Calculate Nitrogen is selected.")
},
**spec_utils.FLOW_DIR_ALGORITHM
**spec.FLOW_DIR_ALGORITHM
},
"outputs": {
"watershed_results_ndr.gpkg": {
"about": "Vector with aggregated nutrient model results per watershed.",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"p_surface_load": {
"type": "number",
@ -321,8 +321,8 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": "Effective phosphorus retention provided by the downslope flow path for each pixel",
"bands": {1: {"type": "ratio"}}
},
"flow_accumulation.tif": spec_utils.FLOW_ACCUMULATION,
"flow_direction.tif": spec_utils.FLOW_DIRECTION,
"flow_accumulation.tif": spec.FLOW_ACCUMULATION,
"flow_direction.tif": spec.FLOW_DIRECTION,
"ic_factor.tif": {
"about": "Index of connectivity",
"bands": {1: {"type": "ratio"}}
@ -379,7 +379,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": "Inverse of slope",
"bands": {1: {"type": "number", "units": u.none}}
},
"stream.tif": spec_utils.STREAM,
"stream.tif": spec.STREAM,
"sub_load_n.tif": {
"about": "Nitrogen loads for subsurface transport",
"bands": {1: {
@ -449,8 +449,8 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": "Runoff proxy input masked to exclude pixels outside the watershed",
"bands": {1: {"type": "number", "units": u.none}}
},
"filled_dem.tif": spec_utils.FILLED_DEM,
"slope.tif": spec_utils.SLOPE,
"filled_dem.tif": spec.FILLED_DEM,
"slope.tif": spec.SLOPE,
"subsurface_export_n.pickle": {
"about": "Pickled zonal statistics of nitrogen subsurface export"
},
@ -474,7 +474,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -15,14 +15,14 @@ from osgeo import gdal
from osgeo import ogr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
LOGGER = logging.getLogger(__name__)
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "pollination",
"model_title": gettext("Crop Pollination"),
"userguide": "croppollination.html",
@ -36,11 +36,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"landcover_raster_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": gettext(
"Map of LULC codes. All values in this raster must have "
@ -99,7 +99,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"nesting_[SUBSTRATE]_availability_index": {
"type": "ratio",
"about": gettext(
@ -174,7 +174,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"The proportion of pollination required on the farm "
"that is provided by managed pollinators.")}
},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": False,
"about": gettext(
"Map of farm sites to be analyzed, with pollination data "
@ -187,7 +187,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"created_if": "farm_vector_path",
"about": gettext(
"A copy of the input farm polygon vector file with additional fields"),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"p_abund": {
"about": (
@ -319,11 +319,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"reprojected_farm_vector.shp": {
"about": "Farm vector reprojected to the LULC projection",
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -30,7 +30,7 @@ from osgeo import osr
# prefer to do intrapackage imports to avoid case where global package is
# installed and we import the global version of it rather than the local
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..unit_registry import u
@ -58,7 +58,7 @@ predictor_table_columns = {
"about": gettext("A spatial file to use as a predictor."),
"bands": {1: {"type": "number", "units": u.none}},
"fields": {},
"geometries": spec_utils.ALL_GEOMS
"geometries": spec.ALL_GEOMS
},
"type": {
"type": "option_string",
@ -99,7 +99,7 @@ predictor_table_columns = {
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "recreation",
"model_title": gettext("Visitation: Recreation and Tourism"),
"userguide": "recreation.html",
@ -115,11 +115,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ['n_workers', 'hostname', 'port']
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"aoi_path": {
**spec_utils.AOI,
**spec.AOI,
"about": gettext("Map of area(s) over which to run the model.")
},
"hostname": {
@ -234,7 +234,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"PUD_results.gpkg": {
"about": gettext(
"Results of photo-user-days aggregations in the AOI."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"PUD_YR_AVG": {
"about": gettext(
@ -253,7 +253,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"TUD_results.gpkg": {
"about": gettext(
"Results of twitter-user-days aggregations in the AOI."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"PUD_YR_AVG": {
"about": gettext(
@ -308,7 +308,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext(
"AOI polygons with all the variables needed to compute a regression, "
"including predictor attributes and the user-days response variable."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"[PREDICTOR]": {
"type": "number",
@ -353,7 +353,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext(
"Results of scenario, including the predictor data used in the "
"scenario and the predicted visitation patterns for the scenario."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"[PREDICTOR]": {
"type": "number",
@ -378,7 +378,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext(
"Copy of the input AOI, gridded if applicable."),
"fields": {},
"geometries": spec_utils.POLYGONS
"geometries": spec.POLYGONS
},
"aoi.zip": {
"about": gettext("Compressed AOI")
@ -411,7 +411,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -7,7 +7,7 @@ import pygeoprocessing.routing
import taskgraph
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -16,7 +16,7 @@ LOGGER = logging.getLogger(__name__)
INVALID_BAND_INDEX_MSG = gettext('Must be between 1 and {maximum}')
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "routedem",
"model_title": gettext("RouteDEM"),
"userguide": "routedem.html",
@ -34,10 +34,10 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"dem_path": spec_utils.DEM,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"dem_path": spec.DEM,
"dem_band_index": {
"type": "number",
"expression": "value >= 1",
@ -87,11 +87,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("calculate streams")
},
"threshold_flow_accumulation": {
**spec_utils.THRESHOLD_FLOW_ACCUMULATION,
**spec.THRESHOLD_FLOW_ACCUMULATION,
"required": "calculate_stream_threshold",
"allowed": "calculate_stream_threshold",
"about": (
spec_utils.THRESHOLD_FLOW_ACCUMULATION['about'] + " " +
spec.THRESHOLD_FLOW_ACCUMULATION['about'] + " " +
gettext("Required if Calculate Streams is selected."))
},
"calculate_downslope_distance": {
@ -125,17 +125,17 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
},
"outputs": {
"taskgraph_cache": spec_utils.TASKGRAPH_DIR,
"filled.tif": spec_utils.FILLED_DEM,
"flow_accumulation.tif": spec_utils.FLOW_ACCUMULATION,
"flow_direction.tif": spec_utils.FLOW_DIRECTION,
"slope.tif": spec_utils.SLOPE,
"stream_mask.tif": spec_utils.STREAM,
"taskgraph_cache": spec.TASKGRAPH_DIR,
"filled.tif": spec.FILLED_DEM,
"flow_accumulation.tif": spec.FLOW_ACCUMULATION,
"flow_direction.tif": spec.FLOW_DIRECTION,
"slope.tif": spec.SLOPE,
"stream_mask.tif": spec.STREAM,
"strahler_stream_order.gpkg": {
"about": (
"A vector of line segments indicating the Strahler stream "
"order and other properties of each stream segment."),
"geometries": spec_utils.LINESTRING,
"geometries": spec.LINESTRING,
"fields": {
"order": {
"about": "The Strahler stream order.",
@ -238,7 +238,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"subwatersheds. A new subwatershed is created for each "
"tributary of a stream and is influenced greatly by "
"your choice of Threshold Flow Accumulation value."),
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {
"stream_id": {
"about": (

View File

@ -17,7 +17,7 @@ import taskgraph
from osgeo import gdal
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -28,7 +28,7 @@ MISSING_CONVERT_OPTION_MSG = gettext(
'One or more of "convert_nearest_to_edge" or "convert_farthest_from_edge" '
'must be selected')
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "scenario_generator_proximity",
"model_title": gettext("Scenario Generator: Proximity Based"),
"userguide": "scenario_gen_proximity.html",
@ -43,11 +43,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"base_lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": gettext("Base map from which to generate scenarios."),
"name": gettext("base LULC map")
@ -96,7 +96,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("number of conversion steps")
},
"aoi_path": {
**spec_utils.AOI,
**spec.AOI,
"required": False,
"about": gettext(
"Area over which to run the conversion. Provide this input if "
@ -198,7 +198,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -17,7 +17,7 @@ from osgeo import ogr
from osgeo import osr
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..unit_registry import u
@ -46,7 +46,7 @@ _INTERMEDIATE_BASE_FILES = {
'value_pattern': 'value_{id}.tif',
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "scenic_quality",
"model_title": gettext("Scenic Quality"),
"userguide": "scenic_quality.html",
@ -64,16 +64,16 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"aoi_path": {
**spec_utils.AOI,
**spec.AOI,
},
"structure_path": {
"name": gettext("features impacting scenic quality"),
"type": "vector",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
"radius": {
"type": "number",
@ -111,7 +111,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"quality. This must have the same projection as the DEM.")
},
"dem_path": {
**spec_utils.DEM,
**spec.DEM,
"projected": True,
"projection_units": u.meter
},
@ -196,7 +196,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"contents": {
"aoi_reprojected.shp": {
"about": gettext("This vector is the AOI, reprojected to the DEMs spatial reference and projection."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"dem_clipped.tif": {
@ -206,12 +206,12 @@ MODEL_SPEC = spec_utils.build_model_spec({
"structures_clipped.shp": {
"about": gettext(
"Copy of the structures vector, clipped to the AOI extent."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {}
},
"structures_reprojected.shp": {
"about": gettext("Copy of the structures vector, reprojected to the DEMs spatial reference and projection."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {}
},
"value_[FEATURE_ID].tif": {
@ -224,7 +224,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -18,7 +18,7 @@ from osgeo import gdal
from osgeo import ogr
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import urban_nature_access
from .. import utils
from .. import validation
@ -27,7 +27,7 @@ from . import sdr_core
LOGGER = logging.getLogger(__name__)
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "sdr",
"model_title": gettext("Sediment Delivery Ratio"),
"userguide": "sdr.html",
@ -48,11 +48,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": False,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"dem_path": {
**spec_utils.DEM,
**spec.DEM,
"projected": True
},
"erosivity_path": {
@ -79,15 +79,15 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("soil erodibility")
},
"lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": spec_utils.LULC['about'] + " " + gettext(
"about": spec.LULC['about'] + " " + gettext(
"All values in this raster must "
"have corresponding entries in the Biophysical Table.")
},
"watersheds_path": {
"type": "vector",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"projected": True,
"fields": {},
"about": gettext(
@ -100,7 +100,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"usle_c": {
"type": "ratio",
"about": gettext("Cover-management factor for the USLE")},
@ -114,7 +114,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"corresponding entries in this table."),
"name": gettext("biophysical table")
},
"threshold_flow_accumulation": spec_utils.THRESHOLD_FLOW_ACCUMULATION,
"threshold_flow_accumulation": spec.THRESHOLD_FLOW_ACCUMULATION,
"k_param": {
"type": "number",
"units": u.none,
@ -151,7 +151,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"streams. Pixels with 0 are not drainages."),
"name": gettext("drainages")
},
**spec_utils.FLOW_DIR_ALGORITHM
**spec.FLOW_DIR_ALGORITHM
},
"outputs": {
"avoided_erosion.tif": {
@ -189,7 +189,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"units": u.metric_ton/u.hectare
}}
},
"stream.tif": spec_utils.STREAM,
"stream.tif": spec.STREAM,
"stream_and_drainage.tif": {
"created_if": "drainage_path",
"about": "This raster is the union of that layer with the calculated stream layer(Eq. (85)). Values of 1 represent streams, values of 0 are non-stream pixels.",
@ -204,7 +204,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"watershed_results_sdr.shp": {
"about": "Table containing biophysical values for each watershed",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"sed_export": {
"type": "number",
@ -270,8 +270,8 @@ MODEL_SPEC = spec_utils.build_model_spec({
"units": u.metric_ton/(u.hectare*u.year)
}}
},
"flow_accumulation.tif": spec_utils.FLOW_ACCUMULATION,
"flow_direction.tif": spec_utils.FLOW_DIRECTION,
"flow_accumulation.tif": spec.FLOW_ACCUMULATION,
"flow_direction.tif": spec.FLOW_DIRECTION,
"ic.tif": {
"about": gettext("Index of connectivity (Eq. (70))"),
"bands": {1: {
@ -286,7 +286,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"units": u.none
}}
},
"pit_filled_dem.tif": spec_utils.FILLED_DEM,
"pit_filled_dem.tif": spec.FILLED_DEM,
"s_accumulation.tif": {
"about": gettext(
"Flow accumulation weighted by the thresholded slope. "
@ -309,7 +309,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext("Sediment delivery ratio (Eq. (75))"),
"bands": {1: {"type": "ratio"}}
},
"slope.tif": spec_utils.SLOPE,
"slope.tif": spec.SLOPE,
"slope_threshold.tif": {
"about": gettext(
"Percent slope, thresholded to be no less than 0.005 "
@ -459,7 +459,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
},
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -14,7 +14,7 @@ from osgeo import gdal
from osgeo import ogr
from .. import gettext
from .. import spec_utils
from .. import spec
from .. import utils
from .. import validation
from ..unit_registry import u
@ -28,7 +28,7 @@ MONTH_ID_TO_LABEL = [
'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct',
'nov', 'dec']
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "seasonal_water_yield",
"model_title": gettext("Seasonal Water Yield"),
"userguide": "seasonal_water_yield.html",
@ -52,10 +52,10 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"threshold_flow_accumulation": spec_utils.THRESHOLD_FLOW_ACCUMULATION,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"threshold_flow_accumulation": spec.THRESHOLD_FLOW_ACCUMULATION,
"et0_dir": {
"type": "directory",
"contents": {
@ -116,31 +116,31 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("precipitation directory")
},
"dem_raster_path": {
**spec_utils.DEM,
**spec.DEM,
"projected": True
},
"lulc_raster_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": spec_utils.LULC['about'] + " " + gettext(
"about": spec.LULC['about'] + " " + gettext(
"All values in this raster MUST "
"have corresponding entries in the Biophysical Table.")
},
"soil_group_path": {
**spec_utils.SOIL_GROUP,
**spec.SOIL_GROUP,
"projected": True,
"required": "not user_defined_local_recharge",
"allowed": "not user_defined_local_recharge"
},
"aoi_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True
},
"biophysical_table_path": {
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"cn_[SOIL_GROUP]": {
"type": "number",
"units": u.none,
@ -314,7 +314,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Required if Use Monthly Alpha Table is selected."),
"name": gettext("monthly alpha table")
},
**spec_utils.FLOW_DIR_ALGORITHM
**spec.FLOW_DIR_ALGORITHM
},
"outputs": {
"B.tif": {
@ -402,7 +402,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"aggregated_results_swy.shp": {
"about": gettext("Table of biophysical values for each watershed"),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"qb": {
"about": gettext(
@ -475,7 +475,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"clipped to match the other spatial inputs"),
"bands": {1: {"type": "integer"}}
},
'flow_accum.tif': spec_utils.FLOW_ACCUMULATION,
'flow_accum.tif': spec.FLOW_ACCUMULATION,
'prcp_a[MONTH].tif': {
"bands": {1: {"type": "number", "units": u.millimeter/u.year}},
"about": gettext("Monthly precipitation rasters, aligned and "
@ -506,7 +506,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -12,7 +12,7 @@ from osgeo import ogr
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -25,7 +25,7 @@ UINT8_NODATA = 255
UINT16_NODATA = 65535
NONINTEGER_SOILS_RASTER_MESSAGE = 'Soil group raster data type must be integer'
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "stormwater",
"model_title": gettext("Urban Stormwater Retention"),
"userguide": "stormwater.html",
@ -45,20 +45,20 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True
},
"soil_group_path": spec_utils.SOIL_GROUP,
"precipitation_path": spec_utils.PRECIP,
"soil_group_path": spec.SOIL_GROUP,
"precipitation_path": spec.PRECIP,
"biophysical_table": {
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"emc_[POLLUTANT]": {
"type": "number",
"units": u.milligram/u.liter,
@ -142,7 +142,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("Road centerlines")
},
"aggregate_areas_path": {
**spec_utils.AOI,
**spec.AOI,
"required": False,
"about": gettext(
"Areas over which to aggregate results (typically watersheds "
@ -225,7 +225,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Map of aggregate data. This is identical to the aggregate "
"areas input vector, but each polygon is given additional "
"fields with the aggregate data."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"mean_retention_ratio": {
"type": "ratio",
@ -307,7 +307,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Copy of the road centerlines vector input, "
"reprojected to the LULC raster projection."),
"fields": {},
"geometries": spec_utils.LINES
"geometries": spec.LINES
},
"rasterized_centerlines.tif": {
@ -379,7 +379,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -13,7 +13,7 @@ from natcap.invest import cli
from natcap.invest import datastack
from natcap.invest import set_locale
from natcap.invest import models
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import usage
from natcap.invest import validation

View File

@ -20,7 +20,7 @@ from osgeo import ogr
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -29,7 +29,7 @@ LOGGER = logging.getLogger(__name__)
TARGET_NODATA = -1
_LOGGING_PERIOD = 5
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "urban_cooling_model",
"model_title": gettext("Urban Cooling"),
"userguide": "urban_cooling_model.html",
@ -51,11 +51,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"lulc_raster_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"projection_units": u.meter,
"about": gettext(
@ -63,14 +63,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"raster must have corresponding entries in the Biophysical "
"Table.")
},
"ref_eto_raster_path": spec_utils.ET0,
"aoi_vector_path": spec_utils.AOI,
"ref_eto_raster_path": spec.ET0,
"aoi_vector_path": spec.AOI,
"biophysical_table_path": {
"name": gettext("biophysical table"),
"type": "csv",
"index_col": "lucode",
"columns": {
"lucode": spec_utils.LULC_TABLE_COLUMN,
"lucode": spec.LULC_TABLE_COLUMN,
"kc": {
"type": "number",
"units": u.none,
@ -176,7 +176,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": gettext(
"Code indicating the building type. These codes must "
"match those in the Energy Consumption Table.")}},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": "do_energy_valuation",
"allowed": "do_energy_valuation",
"about": gettext(
@ -269,7 +269,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"A copy of the input Area of Interest vector with "
"additional fields."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"avg_cc": {
"about": "Average CC value",
@ -308,7 +308,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"buildings_with_stats.shp": {
"about": "A copy of the input vector “Building Footprints” with additional fields.",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"energy_sav": {
"about": "Energy savings value (kWh or currency if optional energy cost input column was provided in the Energy Consumption Table). Savings are relative to a theoretical scenario where the city contains NO natural areas nor green spaces; where CC = 0 for all LULC classes.",
@ -356,14 +356,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"The Area of Interest vector reprojected to the "
"spatial reference of the LULC."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"reprojected_buildings.shp": {
"about": (
"The buildings vector reprojected to the spatial "
"reference of the LULC."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"albedo.tif": {
@ -408,7 +408,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -14,14 +14,14 @@ from osgeo import ogr
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
LOGGER = logging.getLogger(__name__)
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "urban_flood_risk_mitigation",
"model_title": gettext("Urban Flood Risk Mitigation"),
"userguide": "urban_flood_mitigation.html",
@ -42,10 +42,10 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"aoi_watersheds_path": spec_utils.AOI,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"aoi_watersheds_path": spec.AOI,
"rainfall_depth": {
"expression": "value > 0",
"type": "number",
@ -54,14 +54,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("rainfall depth")
},
"lulc_path": {
**spec_utils.LULC,
**spec.LULC,
"projected": True,
"about": gettext(
"Map of LULC. All values in this raster must have "
"corresponding entries in the Biophysical Table.")
},
"soils_hydrological_group_raster_path": {
**spec_utils.SOIL_GROUP,
**spec.SOIL_GROUP,
"projected": True
},
"curve_number_table_path": {
@ -94,7 +94,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Code indicating the building type. These codes "
"must match those in the Damage Loss Table."
)}},
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"required": False,
"about": gettext("Map of building footprints."),
"name": gettext("built infrastructure")
@ -144,7 +144,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"flood_risk_service.shp": {
"about": "Aggregated results for each area of interest.",
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"rnf_rt_idx": {
"about": "Average runoff retention index.",
@ -186,14 +186,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"Copy of AOI vector reprojected to the same spatial "
"reference as the LULC."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"structures_reprojected.shp": {
"about": (
"Copy of built infrastructure vector reprojected to "
"the same spatial reference as the LULC."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {}
},
"aligned_lulc.tif": {
@ -214,7 +214,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -19,10 +19,10 @@ from osgeo import ogr
from osgeo import osr
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .spec_utils import u
from .spec import u
LOGGER = logging.getLogger(__name__)
UINT32_NODATA = int(numpy.iinfo(numpy.uint32).max)
@ -38,7 +38,7 @@ RADIUS_OPT_URBAN_NATURE = 'radius per urban nature class'
RADIUS_OPT_POP_GROUP = 'radius per population group'
POP_FIELD_REGEX = '^pop_'
ID_FIELDNAME = 'adm_unit_id'
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
'model_id': 'urban_nature_access',
'model_title': gettext('Urban Nature Access'),
'userguide': 'urban_nature_access.html',
@ -59,11 +59,11 @@ MODEL_SPEC = spec_utils.build_model_spec({
'different_projections_ok': True,
},
'args': {
'workspace_dir': spec_utils.WORKSPACE,
'results_suffix': spec_utils.SUFFIX,
'n_workers': spec_utils.N_WORKERS,
'workspace_dir': spec.WORKSPACE,
'results_suffix': spec.SUFFIX,
'n_workers': spec.N_WORKERS,
'lulc_raster_path': {
**spec_utils.LULC,
**spec.LULC,
'projected': True,
'projection_units': u.meter,
'about': (
@ -87,7 +87,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
),
'index_col': 'lucode',
'columns': {
'lucode': spec_utils.LULC_TABLE_COLUMN,
'lucode': spec.LULC_TABLE_COLUMN,
'urban_nature': {
'type': 'ratio',
'about': (
@ -130,7 +130,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
'admin_boundaries_vector_path': {
'type': 'vector',
'name': 'administrative boundaries',
'geometries': spec_utils.POLYGONS,
'geometries': spec.POLYGONS,
'fields': {
"pop_[POP_GROUP]": {
"type": "ratio",
@ -351,7 +351,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"about": (
"A copy of the user's administrative boundaries "
"vector with a single layer."),
"geometries": spec_utils.POLYGONS,
"geometries": spec.POLYGONS,
"fields": {
"SUP_DEMadm_cap": {
"type": "number",
@ -622,7 +622,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
'taskgraph_cache': spec_utils.TASKGRAPH_DIR,
'taskgraph_cache': spec.TASKGRAPH_DIR,
}
})

View File

@ -18,7 +18,7 @@ import pygeoprocessing
import requests
from . import utils
from . import spec_utils
from . import spec
ENCODING = sys.getfilesystemencoding()
LOGGER = logging.getLogger(__name__)
@ -121,10 +121,10 @@ def _calculate_args_bounding_box(args, model_spec):
# blank.
spatial_info = None
if (isinstance(model_spec.get_input(key),
spec_utils.SingleBandRasterInput) and value.strip() != ''):
spec.SingleBandRasterInput) and value.strip() != ''):
spatial_info = pygeoprocessing.get_raster_info(value)
elif (isinstance(model_spec.get_input(key),
spec_utils.VectorInput) and value.strip() != ''):
spec.VectorInput) and value.strip() != ''):
spatial_info = pygeoprocessing.get_vector_info(value)
if spatial_info:

View File

@ -18,7 +18,7 @@ from osgeo import ogr
import taskgraph
import pygeoprocessing
from . import utils
from . import spec_utils
from . import spec
from .unit_registry import u
from . import validation
from . import gettext
@ -130,7 +130,7 @@ CAPTURED_WEM_FIELDS = {
}
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "wave_energy",
"model_title": gettext("Wave Energy Production"),
"userguide": "wave_energy.html",
@ -145,23 +145,23 @@ MODEL_SPEC = spec_utils.build_model_spec({
"hidden": ["n_workers"]
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"wave_base_data_path": {
"type": "directory",
"contents": {
"NAmerica_WestCoast_4m.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext(
"Point vector for the west coast of North America and "
"Hawaii.")},
"WCNA_extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext(
"Extract vector for the west coast of North America "
"and Hawaii.")},
@ -173,14 +173,14 @@ MODEL_SPEC = spec_utils.build_model_spec({
"NAmerica_EastCoast_4m.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext(
"Point vector for the East Coast of North America and "
"Puerto Rico.")},
"ECNA_extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext(
"Extract vector for the East Coast of North America "
"and Puerto Rico.")},
@ -192,13 +192,13 @@ MODEL_SPEC = spec_utils.build_model_spec({
"North_Sea_4m.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext(
"Point vector for the North Sea 4 meter resolution.")},
"North_Sea_4m_Extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext(
"Extract vector for the North Sea 4 meter resolution.")},
"North_Sea_4m.bin": {
@ -209,13 +209,13 @@ MODEL_SPEC = spec_utils.build_model_spec({
"North_Sea_10m.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext(
"Point vector for the North Sea 10 meter resolution.")},
"North_Sea_10m_Extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext(
"Extract vector for the North Sea 10 meter resolution.")},
"North_Sea_10m.bin": {
@ -226,12 +226,12 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Australia_4m.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext("Point vector for Australia.")},
"Australia_Extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext("Extract vector for Australia.")},
"Australia_4m.bin": {
"type": "file",
@ -239,12 +239,12 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Global.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"about": gettext("Global point vector.")},
"Global_extract.shp": {
"type": "vector",
"fields": {},
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"about": gettext("Global extract vector.")},
"Global_WW3.txt.bin": {
"type": "file",
@ -274,7 +274,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("analysis area")
},
"aoi_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"projection_units": u.meter,
"required": False
@ -504,17 +504,17 @@ MODEL_SPEC = spec_utils.build_model_spec({
"contents": {
"aoi_clipped_to_extract_path.shp": {
"about": "AOI clipped to the analysis area",
"geometries": spec_utils.POLYGON,
"geometries": spec.POLYGON,
"fields": {}
},
"Captured_WEM_InputOutput_Pts.shp": {
"about": "Map of wave data points.",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": CAPTURED_WEM_FIELDS
},
"Final_WEM_InputOutput_Pts.shp": {
"about": "Map of wave data points.",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": {
**CAPTURED_WEM_FIELDS,
"W2L_MDIST": {
@ -552,7 +552,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"Indexed_WEM_InputOutput_Pts.shp": {
"about": "Map of wave data points.",
"fields": INDEXED_WEM_FIELDS,
"geometries": spec_utils.POINT
"geometries": spec.POINT
},
"interpolated_capwe_mwh.tif": {
"about": "Interpolated wave energy",
@ -576,7 +576,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"WEM_InputOutput_Pts.shp": {
"about": "Map of wave data points.",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": WEM_FIELDS
},
"GridPt.txt": {
@ -589,7 +589,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -20,7 +20,7 @@ from scipy import integrate
from shapely import speedups
from . import gettext
from . import spec_utils
from . import spec
from . import utils
from . import validation
from .unit_registry import u
@ -90,7 +90,7 @@ OUTPUT_WIND_DATA_FIELDS = {
}
}
MODEL_SPEC = spec_utils.build_model_spec({
MODEL_SPEC = spec.build_model_spec({
"model_id": "wind_energy",
"model_title": gettext("Wind Energy Production"),
"userguide": "wind_energy.html",
@ -110,9 +110,9 @@ MODEL_SPEC = spec_utils.build_model_spec({
"different_projections_ok": True,
},
"args": {
"workspace_dir": spec_utils.WORKSPACE,
"results_suffix": spec_utils.SUFFIX,
"n_workers": spec_utils.N_WORKERS,
"workspace_dir": spec.WORKSPACE,
"results_suffix": spec.SUFFIX,
"n_workers": spec.N_WORKERS,
"wind_data_path": {
"type": "csv",
"columns": INPUT_WIND_DATA_FIELDS,
@ -120,7 +120,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
"name": gettext("wind data points")
},
"aoi_vector_path": {
**spec_utils.AOI,
**spec.AOI,
"projected": True,
"projection_units": u.meter,
"required": "valuation_container and grid_points_path",
@ -489,7 +489,7 @@ MODEL_SPEC = spec_utils.build_model_spec({
},
"wind_energy_points.shp": {
"about": gettext("Map of summarized data at each point."),
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": OUTPUT_WIND_DATA_FIELDS
}
}
@ -540,12 +540,12 @@ MODEL_SPEC = spec_utils.build_model_spec({
"wind_data.pickle": {"about": "Pickled wind data dictionary"},
"wind_energy_points_from_data.shp": {
"about": "Wind data",
"geometries": spec_utils.POINT,
"geometries": spec.POINT,
"fields": OUTPUT_WIND_DATA_FIELDS
}
}
},
"taskgraph_cache": spec_utils.TASKGRAPH_DIR
"taskgraph_cache": spec.TASKGRAPH_DIR
}
})

View File

@ -328,7 +328,7 @@ class DatastackArchiveTests(unittest.TestCase):
"""Datastack: test archive extraction."""
from natcap.invest import datastack
from natcap.invest import utils
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import validation
params = {
@ -408,11 +408,11 @@ class DatastackArchiveTests(unittest.TestCase):
self.assertTrue(
filecmp.cmp(archive_params[key], params[key], shallow=False))
spatial_csv_dict = spec_utils.CSVInput(
spatial_csv_dict = spec.CSVInput(
index_col='id',
columns=spec_utils.Columns(
spec_utils.IntegerInput(id='id'),
spec_utils.FileInput(id='path'))
columns=spec.Columns(
spec.IntegerInput(id='id'),
spec.FileInput(id='path'))
).get_validated_dataframe(
archive_params['spatial_table']
).to_dict(orient='index')

View File

@ -1,25 +1,25 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(inputs=[
spec_utils.StringInput(id='blank'),
spec_utils.IntegerInput(id='a'),
spec_utils.StringInput(id='b'),
spec_utils.StringInput(id='c'),
spec_utils.FileInput(id='foo'),
spec_utils.FileInput(id='bar'),
spec_utils.DirectoryInput(id='data_dir', contents={}),
spec_utils.SingleBandRasterInput(id='raster', band=spec_utils.Input()),
spec_utils.VectorInput(id='vector', fields={}, geometries={}),
spec_utils.CSVInput(id='simple_table'),
spec_utils.CSVInput(
MODEL_SPEC = spec.ModelSpec(inputs=[
spec.StringInput(id='blank'),
spec.IntegerInput(id='a'),
spec.StringInput(id='b'),
spec.StringInput(id='c'),
spec.FileInput(id='foo'),
spec.FileInput(id='bar'),
spec.DirectoryInput(id='data_dir', contents={}),
spec.SingleBandRasterInput(id='raster', band=spec.Input()),
spec.VectorInput(id='vector', fields={}, geometries={}),
spec.CSVInput(id='simple_table'),
spec.CSVInput(
id='spatial_table',
columns=spec_utils.Columns(
spec_utils.IntegerInput(id='ID'),
spec_utils.RasterOrVectorInput(
columns=spec.Columns(
spec.IntegerInput(id='ID'),
spec.RasterOrVectorInput(
id='path',
fields={},
geometries={'POINT', 'POLYGON'},
band=spec_utils.NumberInput()
band=spec.NumberInput()
)
)
)],
@ -27,6 +27,6 @@ MODEL_SPEC = spec_utils.ModelSpec(inputs=[
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,14 +1,14 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(
MODEL_SPEC = spec.ModelSpec(
inputs=[
spec_utils.FileInput(id='foo'),
spec_utils.FileInput(id='bar')
spec.FileInput(id='foo'),
spec.FileInput(id='bar')
],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,14 +1,14 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(inputs=[
spec_utils.FileInput(id='some_file'),
spec_utils.DirectoryInput(
MODEL_SPEC = spec.ModelSpec(inputs=[
spec.FileInput(id='some_file'),
spec.DirectoryInput(
id='data_dir',
contents=spec_utils.Contents())],
contents=spec.Contents())],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,11 +1,11 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(inputs=[
spec_utils.SingleBandRasterInput(id='raster', band=spec_utils.Input())],
MODEL_SPEC = spec.ModelSpec(inputs=[
spec.SingleBandRasterInput(id='raster', band=spec.Input())],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,18 +1,18 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(inputs=[
spec_utils.IntegerInput(id='a'),
spec_utils.StringInput(id='b'),
spec_utils.StringInput(id='c'),
spec_utils.StringInput(id='d'),
spec_utils.DirectoryInput(
MODEL_SPEC = spec.ModelSpec(inputs=[
spec.IntegerInput(id='a'),
spec.StringInput(id='b'),
spec.StringInput(id='c'),
spec.StringInput(id='d'),
spec.DirectoryInput(
id='workspace_dir',
contents=spec_utils.Contents()
contents=spec.Contents()
)],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,12 +1,12 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = SimpleNamespace(inputs=[
spec_utils.StringInput(id='foo'),
spec_utils.StringInput(id='bar')],
spec.StringInput(id='foo'),
spec.StringInput(id='bar')],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -1,12 +1,12 @@
from natcap.invest import spec_utils
from natcap.invest import spec
MODEL_SPEC = spec_utils.ModelSpec(inputs=[
spec_utils.VectorInput(
MODEL_SPEC = spec.ModelSpec(inputs=[
spec.VectorInput(
id='vector', fields={}, geometries={})],
outputs={},
model_id='',
model_title='',
userguide='',
ui_spec=spec_utils.UISpec(),
ui_spec=spec.UISpec(),
args_with_spatial_overlap={}
)

View File

@ -6,7 +6,7 @@ import pytest
import pint
from natcap.invest.models import model_id_to_pyname
from natcap.invest import spec_utils
from natcap.invest import spec
from osgeo import gdal
PLUGIN_URL = 'git+https://github.com/emlys/demo-invest-plugin.git'
@ -16,100 +16,100 @@ PLUGIN_NAME = 'foo-model'
gdal.UseExceptions()
valid_nested_input_types = {
None: { # if no parent type (arg is top-level), then all types are valid
spec_utils.IntegerInput,
spec_utils.NumberInput,
spec_utils.RatioInput,
spec_utils.PercentInput,
spec_utils.StringInput,
spec_utils.OptionStringInput,
spec_utils.BooleanInput,
spec_utils.SingleBandRasterInput,
spec_utils.VectorInput,
spec_utils.RasterOrVectorInput,
spec_utils.CSVInput,
spec_utils.DirectoryInput,
spec_utils.FileInput
spec.IntegerInput,
spec.NumberInput,
spec.RatioInput,
spec.PercentInput,
spec.StringInput,
spec.OptionStringInput,
spec.BooleanInput,
spec.SingleBandRasterInput,
spec.VectorInput,
spec.RasterOrVectorInput,
spec.CSVInput,
spec.DirectoryInput,
spec.FileInput
},
spec_utils.SingleBandRasterInput: {
spec_utils.IntegerInput,
spec_utils.NumberInput,
spec_utils.RatioInput,
spec_utils.PercentInput
spec.SingleBandRasterInput: {
spec.IntegerInput,
spec.NumberInput,
spec.RatioInput,
spec.PercentInput
},
spec_utils.VectorInput: {
spec_utils.IntegerInput,
spec_utils.NumberInput,
spec_utils.RatioInput,
spec_utils.PercentInput,
spec_utils.StringInput,
spec_utils.OptionStringInput
spec.VectorInput: {
spec.IntegerInput,
spec.NumberInput,
spec.RatioInput,
spec.PercentInput,
spec.StringInput,
spec.OptionStringInput
},
spec_utils.CSVInput: {
spec_utils.IntegerInput,
spec_utils.NumberInput,
spec_utils.RatioInput,
spec_utils.PercentInput,
spec_utils.StringInput,
spec_utils.OptionStringInput,
spec_utils.BooleanInput,
spec_utils.SingleBandRasterInput,
spec_utils.VectorInput,
spec_utils.RasterOrVectorInput
spec.CSVInput: {
spec.IntegerInput,
spec.NumberInput,
spec.RatioInput,
spec.PercentInput,
spec.StringInput,
spec.OptionStringInput,
spec.BooleanInput,
spec.SingleBandRasterInput,
spec.VectorInput,
spec.RasterOrVectorInput
},
spec_utils.DirectoryInput: {
spec_utils.CSVInput,
spec_utils.DirectoryInput,
spec_utils.FileInput,
spec_utils.SingleBandRasterInput,
spec_utils.VectorInput,
spec_utils.RasterOrVectorInput
spec.DirectoryInput: {
spec.CSVInput,
spec.DirectoryInput,
spec.FileInput,
spec.SingleBandRasterInput,
spec.VectorInput,
spec.RasterOrVectorInput
}
}
valid_nested_output_types = {
None: { # if no parent type (arg is top-level), then all types are valid
spec_utils.IntegerOutput,
spec_utils.NumberOutput,
spec_utils.RatioOutput,
spec_utils.PercentOutput,
spec_utils.StringOutput,
spec_utils.OptionStringOutput,
spec_utils.SingleBandRasterOutput,
spec_utils.VectorOutput,
spec_utils.CSVOutput,
spec_utils.DirectoryOutput,
spec_utils.FileOutput
spec.IntegerOutput,
spec.NumberOutput,
spec.RatioOutput,
spec.PercentOutput,
spec.StringOutput,
spec.OptionStringOutput,
spec.SingleBandRasterOutput,
spec.VectorOutput,
spec.CSVOutput,
spec.DirectoryOutput,
spec.FileOutput
},
spec_utils.SingleBandRasterOutput: {
spec_utils.IntegerOutput,
spec_utils.NumberOutput,
spec_utils.RatioOutput,
spec_utils.PercentOutput
spec.SingleBandRasterOutput: {
spec.IntegerOutput,
spec.NumberOutput,
spec.RatioOutput,
spec.PercentOutput
},
spec_utils.VectorOutput: {
spec_utils.IntegerOutput,
spec_utils.NumberOutput,
spec_utils.RatioOutput,
spec_utils.PercentOutput,
spec_utils.StringOutput,
spec_utils.OptionStringOutput
spec.VectorOutput: {
spec.IntegerOutput,
spec.NumberOutput,
spec.RatioOutput,
spec.PercentOutput,
spec.StringOutput,
spec.OptionStringOutput
},
spec_utils.CSVOutput: {
spec_utils.IntegerOutput,
spec_utils.NumberOutput,
spec_utils.RatioOutput,
spec_utils.PercentOutput,
spec_utils.StringOutput,
spec_utils.OptionStringOutput,
spec_utils.SingleBandRasterOutput,
spec_utils.VectorOutput
spec.CSVOutput: {
spec.IntegerOutput,
spec.NumberOutput,
spec.RatioOutput,
spec.PercentOutput,
spec.StringOutput,
spec.OptionStringOutput,
spec.SingleBandRasterOutput,
spec.VectorOutput
},
spec_utils.DirectoryOutput: {
spec_utils.CSVOutput,
spec_utils.DirectoryOutput,
spec_utils.FileOutput,
spec_utils.SingleBandRasterOutput,
spec_utils.VectorOutput
spec.DirectoryOutput: {
spec.CSVOutput,
spec.DirectoryOutput,
spec.FileOutput,
spec.SingleBandRasterOutput,
spec.VectorOutput
}
}
@ -137,7 +137,7 @@ class ValidateModelSpecs(unittest.TestCase):
set(model.MODEL_SPEC.args_with_spatial_overlap).issubset(
{'spatial_keys', 'different_projections_ok'}))
self.assertIsInstance(model.MODEL_SPEC.ui_spec, spec_utils.UISpec)
self.assertIsInstance(model.MODEL_SPEC.ui_spec, spec.UISpec)
if model.MODEL_SPEC.ui_spec.dropdown_functions:
self.assertIsInstance(
model.MODEL_SPEC.ui_spec.dropdown_functions, dict)
@ -191,13 +191,13 @@ class ValidateModelSpecs(unittest.TestCase):
t = type(spec)
self.assertIn(t, valid_nested_output_types[parent_type])
if t is spec_utils.NumberOutput:
if t is spec.NumberOutput:
# number type should have a units property
self.assertTrue(hasattr(spec, 'units'))
# Undefined units should use the custom u.none unit
self.assertIsInstance(spec.units, pint.Unit)
elif t is spec_utils.SingleBandRasterOutput:
elif t is spec.SingleBandRasterOutput:
# raster type should have a bands property that maps each band
# index to a nested type dictionary describing the band's data
self.assertTrue(hasattr(spec, 'band'))
@ -206,13 +206,13 @@ class ValidateModelSpecs(unittest.TestCase):
f'{key}.band',
parent_type=t)
elif t is spec_utils.VectorOutput:
elif t is spec.VectorOutput:
# vector type should have:
# - a fields property that maps each field header to a nested
# type dictionary describing the data in that field
# - a geometries property: the set of valid geometry types
self.assertTrue(hasattr(spec, 'fields'))
self.assertIsInstance(spec.fields, spec_utils.Fields)
self.assertIsInstance(spec.fields, spec.Fields)
for field in spec.fields:
self.validate_output(
field,
@ -222,14 +222,14 @@ class ValidateModelSpecs(unittest.TestCase):
self.assertTrue(hasattr(spec, 'geometries'))
self.assertIsInstance(spec.geometries, set)
elif t is spec_utils.CSVOutput:
elif t is spec.CSVOutput:
# csv type may have a columns property.
# the columns property maps each expected column header
# name/pattern to a nested type dictionary describing the data
# in that column. may be absent if the table structure
# is too complex to describe this way.
self.assertTrue(hasattr(spec, 'columns'))
self.assertIsInstance(spec.columns, spec_utils.Columns)
self.assertIsInstance(spec.columns, spec.Columns)
for column in spec.columns:
self.validate_output(
column,
@ -238,19 +238,19 @@ class ValidateModelSpecs(unittest.TestCase):
if spec.index_col:
self.assertIn(spec.index_col, [s.id for s in spec.columns])
elif t is spec_utils.DirectoryOutput:
elif t is spec.DirectoryOutput:
# directory type should have a contents property that maps each
# expected path name/pattern within the directory to a nested
# type dictionary describing the data at that filepath
self.assertTrue(hasattr(spec, 'contents'))
self.assertIsInstance(spec.contents, spec_utils.Contents)
self.assertIsInstance(spec.contents, spec.Contents)
for path in spec.contents:
self.validate_output(
path,
f'{key}.contents.{path}',
parent_type=t)
elif t is spec_utils.OptionStringOutput:
elif t is spec.OptionStringOutput:
# option_string type should have an options property that
# describes the valid options
self.assertTrue(hasattr(spec, 'options'))
@ -260,7 +260,7 @@ class ValidateModelSpecs(unittest.TestCase):
isinstance(option, str) or
isinstance(option, int))
elif t is spec_utils.FileOutput:
elif t is spec.FileOutput:
pass
# iterate over the remaining attributes
@ -298,7 +298,7 @@ class ValidateModelSpecs(unittest.TestCase):
t = type(arg)
self.assertIn(t, valid_nested_input_types[parent_type])
if t is spec_utils.OptionStringInput:
if t is spec.OptionStringInput:
# option_string type should have an options property that
# describes the valid options
self.assertTrue(hasattr(arg, 'options'))
@ -330,7 +330,7 @@ class ValidateModelSpecs(unittest.TestCase):
attrs.remove('options')
elif t is spec_utils.StringInput:
elif t is spec.StringInput:
# freestyle_string may optionally have a regexp attribute
# this is a regular expression that the string must match
if arg.regexp:
@ -338,7 +338,7 @@ class ValidateModelSpecs(unittest.TestCase):
re.compile(arg.regexp) # should be regex compilable
attrs.remove('regexp')
elif t is spec_utils.NumberInput:
elif t is spec.NumberInput:
# number type should have a units property
self.assertTrue(hasattr(arg, 'units'))
# Undefined units should use the custom u.none unit
@ -353,7 +353,7 @@ class ValidateModelSpecs(unittest.TestCase):
if arg.expression:
self.assertIsInstance(arg.expression, str)
elif t is spec_utils.SingleBandRasterInput:
elif t is spec.SingleBandRasterInput:
# raster type should have a bands property that maps each band
# index to a nested type dictionary describing the band's data
self.assertTrue(hasattr(arg, 'band'))
@ -377,13 +377,13 @@ class ValidateModelSpecs(unittest.TestCase):
arg.projection_units, pint.Unit)
attrs.remove('projection_units')
elif t is spec_utils.VectorInput:
elif t is spec.VectorInput:
# vector type should have:
# - a fields property that maps each field header to a nested
# type dictionary describing the data in that field
# - a geometries property: the set of valid geometry types
self.assertTrue(hasattr(arg, 'fields'))
self.assertIsInstance(arg.fields, spec_utils.Fields)
self.assertIsInstance(arg.fields, spec.Fields)
for field in arg.fields:
self.validate_args(
field,
@ -411,7 +411,7 @@ class ValidateModelSpecs(unittest.TestCase):
arg.projection_units, pint.Unit)
attrs.remove('projection_units')
elif t is spec_utils.CSVInput:
elif t is spec.CSVInput:
# csv type should have a rows property, columns property, or
# neither. rows or columns properties map each expected header
# name/pattern to a nested type dictionary describing the data
@ -435,12 +435,12 @@ class ValidateModelSpecs(unittest.TestCase):
if arg.index_col:
self.assertIn(arg.index_col, [s.id for s in arg.columns])
elif t is spec_utils.DirectoryInput:
elif t is spec.DirectoryInput:
# directory type should have a contents property that maps each
# expected path name/pattern within the directory to a nested
# type dictionary describing the data at that filepath
self.assertTrue(hasattr(arg, 'contents'))
self.assertIsInstance(arg.contents, spec_utils.Contents)
self.assertIsInstance(arg.contents, spec.Contents)
for path in arg.contents:
self.validate_args(
path,
@ -460,7 +460,7 @@ class ValidateModelSpecs(unittest.TestCase):
self.assertIsInstance(arg.must_exist, bool)
attrs.remove('must_exist')
elif t is spec_utils.FileInput:
elif t is spec.FileInput:
# file type may optionally have a 'permissions' attribute
# this is a string listing the permissions e.g. 'rwx'
if arg.permissions:
@ -505,7 +505,7 @@ class ValidateModelSpecs(unittest.TestCase):
def test_model_specs_serialize(self):
"""MODEL_SPEC: test each arg spec can serialize to JSON."""
from natcap.invest import spec_utils
from natcap.invest import spec
for pyname in model_id_to_pyname.values():
model = importlib.import_module(pyname)
@ -513,11 +513,11 @@ class ValidateModelSpecs(unittest.TestCase):
class SpecUtilsTests(unittest.TestCase):
"""Tests for natcap.invest.spec_utils."""
"""Tests for natcap.invest.spec."""
def test_format_unit(self):
"""spec_utils: test converting units to strings with format_unit."""
from natcap.invest import spec_utils
"""spec: test converting units to strings with format_unit."""
from natcap.invest import spec
for unit_name, expected in [
('meter', 'm'),
('meter / second', 'm/s'),
@ -525,15 +525,15 @@ class SpecUtilsTests(unittest.TestCase):
('t * hr * ha / ha / MJ / mm', 't · h · ha / (ha · MJ · mm)'),
('mm^3 / year', 'mm³/year')
]:
unit = spec_utils.u.Unit(unit_name)
actual = spec_utils.format_unit(unit)
unit = spec.u.Unit(unit_name)
actual = spec.format_unit(unit)
self.assertEqual(expected, actual)
def test_format_unit_raises_error(self):
"""spec_utils: format_unit raises TypeError if not a pint.Unit."""
from natcap.invest import spec_utils
"""spec: format_unit raises TypeError if not a pint.Unit."""
from natcap.invest import spec
with self.assertRaises(TypeError):
spec_utils.format_unit({})
spec.format_unit({})
@pytest.mark.skip(reason="Possible race condition of plugin not being uninstalled before other tests are run.")
class PluginTests(unittest.TestCase):

View File

@ -5,7 +5,7 @@ import types
import unittest
import geometamaker
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest.unit_registry import u
from osgeo import gdal
from osgeo import ogr
@ -14,11 +14,11 @@ gdal.UseExceptions()
class SpecUtilsUnitTests(unittest.TestCase):
"""Unit tests for natcap.invest.spec_utils."""
"""Unit tests for natcap.invest.spec."""
def test_format_unit(self):
"""spec_utils: test converting units to strings with format_unit."""
from natcap.invest import spec_utils
"""spec: test converting units to strings with format_unit."""
from natcap.invest import spec
for unit_name, expected in [
('meter', 'm'),
('meter / second', 'm/s'),
@ -26,87 +26,87 @@ class SpecUtilsUnitTests(unittest.TestCase):
('t * hr * ha / ha / MJ / mm', 't · h · ha / (ha · MJ · mm)'),
('mm^3 / year', 'mm³/year')
]:
unit = spec_utils.u.Unit(unit_name)
actual = spec_utils.format_unit(unit)
unit = spec.u.Unit(unit_name)
actual = spec.format_unit(unit)
self.assertEqual(expected, actual)
def test_format_unit_raises_error(self):
"""spec_utils: format_unit raises TypeError if not a pint.Unit."""
from natcap.invest import spec_utils
"""spec: format_unit raises TypeError if not a pint.Unit."""
from natcap.invest import spec
with self.assertRaises(TypeError):
spec_utils.format_unit({})
spec.format_unit({})
class TestDescribeArgFromSpec(unittest.TestCase):
"""Test building RST for various invest args specifications."""
def test_number_spec(self):
spec = spec_utils.NumberInput(
spec = spec.NumberInput(
name="Bar",
about="Description",
units=u.meter**3/u.month,
expression="value >= 0"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`number <input_types.html#number>`__, '
'units: **m³/month**, *required*): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_ratio_spec(self):
spec = spec_utils.RatioInput(
spec = spec.RatioInput(
name="Bar",
about="Description"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = (['**Bar** (`ratio <input_types.html#ratio>`__, '
'*required*): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_percent_spec(self):
spec = spec_utils.PercentInput(
spec = spec.PercentInput(
name="Bar",
about="Description",
required=False
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = (['**Bar** (`percent <input_types.html#percent>`__, '
'*optional*): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_integer_spec(self):
spec = spec_utils.IntegerInput(
spec = spec.IntegerInput(
name="Bar",
about="Description",
required=True
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = (['**Bar** (`integer <input_types.html#integer>`__, '
'*required*): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_boolean_spec(self):
spec = spec_utils.BooleanInput(
spec = spec.BooleanInput(
name="Bar",
about="Description"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = (['**Bar** (`true/false <input_types.html#truefalse>'
'`__): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_freestyle_string_spec(self):
spec = spec_utils.StringInput(
spec = spec.StringInput(
name="Bar",
about="Description"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = (['**Bar** (`text <input_types.html#text>`__, '
'*required*): Description'])
self.assertEqual(repr(out), repr(expected_rst))
def test_option_string_spec_dictionary(self):
spec = spec_utils.OptionStringInput(
spec = spec.OptionStringInput(
name="Bar",
about="Description",
options={
@ -124,7 +124,7 @@ class TestDescribeArgFromSpec(unittest.TestCase):
)
# expect that option case is ignored
# otherwise, c would sort before A
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`option <input_types.html#option>`__, *required*): Description',
'\tOptions:',
@ -135,12 +135,12 @@ class TestDescribeArgFromSpec(unittest.TestCase):
self.assertEqual(repr(out), repr(expected_rst))
def test_option_string_spec_list(self):
spec = spec_utils.OptionStringInput(
spec = spec.OptionStringInput(
name="Bar",
about="Description",
options=["option_a", "Option_b"]
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`option <input_types.html#option>`__, *required*): Description',
'\tOptions: option_a, Option_b'
@ -148,48 +148,48 @@ class TestDescribeArgFromSpec(unittest.TestCase):
self.assertEqual(repr(out), repr(expected_rst))
def test_raster_spec(self):
spec = spec_utils.SingleBandRasterInput(
band=spec_utils.IntegerInput(),
spec = spec.SingleBandRasterInput(
band=spec.IntegerInput(),
about="Description",
name="Bar"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`raster <input_types.html#raster>`__, *required*): Description'
])
self.assertEqual(repr(out), repr(expected_rst))
spec = spec_utils.SingleBandRasterInput(
band=spec_utils.NumberInput(units=u.millimeter/u.year),
spec = spec.SingleBandRasterInput(
band=spec.NumberInput(units=u.millimeter/u.year),
about="Description",
name="Bar"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`raster <input_types.html#raster>`__, units: **mm/year**, *required*): Description'
])
self.assertEqual(repr(out), repr(expected_rst))
def test_vector_spec(self):
spec = spec_utils.VectorInput(
spec = spec.VectorInput(
fields={},
geometries={"LINESTRING"},
about="Description",
name="Bar"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`vector <input_types.html#vector>`__, linestring, *required*): Description'
])
self.assertEqual(repr(out), repr(expected_rst))
spec = spec_utils.VectorInput(
fields=spec_utils.Fields(
spec_utils.IntegerInput(
spec = spec.VectorInput(
fields=spec.Fields(
spec.IntegerInput(
id="id",
about="Unique identifier for each feature"
),
spec_utils.NumberInput(
spec.NumberInput(
id="precipitation",
units=u.millimeter/u.year,
about="Average annual precipitation over the area"
@ -199,18 +199,18 @@ class TestDescribeArgFromSpec(unittest.TestCase):
about="Description",
name="Bar"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`vector <input_types.html#vector>`__, polygon/multipolygon, *required*): Description',
])
self.assertEqual(repr(out), repr(expected_rst))
def test_csv_spec(self):
spec = spec_utils.CSVInput(
spec = spec.CSVInput(
about="Description.",
name="Bar"
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`CSV <input_types.html#csv>`__, *required*): Description. '
'Please see the sample data table for details on the format.'
@ -219,17 +219,17 @@ class TestDescribeArgFromSpec(unittest.TestCase):
# Test every type that can be nested in a CSV column:
# number, ratio, percent, code,
spec = spec_utils.CSVInput(
spec = spec.CSVInput(
about="Description",
name="Bar",
columns=spec_utils.Columns(
spec_utils.RatioInput(
columns=spec.Columns(
spec.RatioInput(
id="b",
about="description"
)
)
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`CSV <input_types.html#csv>`__, *required*): Description'
])
@ -237,26 +237,26 @@ class TestDescribeArgFromSpec(unittest.TestCase):
def test_directory_spec(self):
self.maxDiff = None
spec = spec_utils.DirectoryInput(
spec = spec.DirectoryInput(
about="Description",
name="Bar",
contents={}
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`directory <input_types.html#directory>`__, *required*): Description'
])
self.assertEqual(repr(out), repr(expected_rst))
def test_multi_type_spec(self):
spec = spec_utils.RasterOrVectorInput(
spec = spec.RasterOrVectorInput(
about="Description",
name="Bar",
band=spec_utils.IntegerInput(),
band=spec.IntegerInput(),
geometries={"POLYGON"},
fields={}
)
out = spec_utils.describe_arg_from_spec(spec.name, spec)
out = spec.describe_arg_from_spec(spec.name, spec)
expected_rst = ([
'**Bar** (`raster <input_types.html#raster>`__ or `vector <input_types.html#vector>`__, *required*): Description'
])
@ -264,7 +264,7 @@ class TestDescribeArgFromSpec(unittest.TestCase):
def test_real_model_spec(self):
from natcap.invest import carbon
out = spec_utils.describe_arg_from_name(
out = spec.describe_arg_from_name(
'natcap.invest.carbon', 'carbon_pools_path', 'columns', 'lucode')
expected_rst = (
'.. _carbon-pools-path-columns-lucode:\n\n' +
@ -278,7 +278,7 @@ def _generate_files_from_spec(output_spec, workspace):
"""A utility function to support the metadata test."""
for spec_data in output_spec:
print(spec_data.__class__)
if spec_data.__class__ is spec_utils.DirectoryOutput:
if spec_data.__class__ is spec.DirectoryOutput:
os.mkdir(os.path.join(workspace, spec_data.id))
_generate_files_from_spec(
spec_data.contents, os.path.join(workspace, spec_data.id))
@ -319,21 +319,21 @@ class TestMetadataFromSpec(unittest.TestCase):
# An example invest output spec
output_spec = [
spec_utils.DirectoryOutput(
spec.DirectoryOutput(
id='output',
contents=spec_utils.Contents(
spec_utils.SingleBandRasterOutput(
contents=spec.Contents(
spec.SingleBandRasterOutput(
id="urban_nature_supply_percapita.tif",
about="The calculated supply per capita of urban nature.",
band=spec_utils.NumberInput(units=u.m**2)
band=spec.NumberInput(units=u.m**2)
),
spec_utils.VectorOutput(
spec.VectorOutput(
id="admin_boundaries.gpkg",
about=("A copy of the user's administrative boundaries "
"vector with a single layer."),
geometries=spec_utils.POLYGONS,
fields=spec_utils.Fields(
spec_utils.NumberInput(
geometries=spec.POLYGONS,
fields=spec.Fields(
spec.NumberInput(
id="SUP_DEMadm_cap",
units=u.m**2/u.person,
about="The average urban nature supply/demand"
@ -342,10 +342,10 @@ class TestMetadataFromSpec(unittest.TestCase):
)
)
),
spec_utils.DirectoryOutput(
spec.DirectoryOutput(
id='intermediate',
contents=spec_utils.Contents(
spec_utils.build_output_spec('taskgraph_cache', spec_utils.TASKGRAPH_DIR)
contents=spec.Contents(
spec.build_output_spec('taskgraph_cache', spec.TASKGRAPH_DIR)
)
)
]
@ -356,7 +356,7 @@ class TestMetadataFromSpec(unittest.TestCase):
model_module = types.SimpleNamespace(
__name__='urban_nature_access',
execute=lambda: None,
MODEL_SPEC=spec_utils.ModelSpec(
MODEL_SPEC=spec.ModelSpec(
model_id='urban_nature_access',
model_title='Urban Nature Access',
userguide='',
@ -370,7 +370,7 @@ class TestMetadataFromSpec(unittest.TestCase):
args_dict = {'workspace_dir': self.workspace_dir}
spec_utils.generate_metadata(model_module, args_dict)
spec.generate_metadata(model_module, args_dict)
files, messages = geometamaker.validate_dir(
self.workspace_dir, recursive=True)
self.assertEqual(len(files), 2)

View File

@ -30,7 +30,7 @@ class UsageLoggingTests(unittest.TestCase):
"""Usage logger test that we can extract bounding boxes."""
from natcap.invest import utils
from natcap.invest import usage
from natcap.invest import spec_utils
from natcap.invest import spec
srs = osr.SpatialReference()
srs.ImportFromEPSG(32731) # WGS84 / UTM zone 31s
@ -65,15 +65,15 @@ class UsageLoggingTests(unittest.TestCase):
'blank_vector_path': '',
}
model_spec = spec_utils.ModelSpec(
model_spec = spec.ModelSpec(
model_id='', model_title='', userguide=None,
aliases=None, ui_spec=spec_utils.UISpec(order=[], hidden={}),
aliases=None, ui_spec=spec.UISpec(order=[], hidden={}),
inputs=[
spec_utils.SingleBandRasterInput(id='raster', band=spec_utils.Input()),
spec_utils.VectorInput(id='vector', geometries={}, fields={}),
spec_utils.StringInput(id='not_a_gis_input'),
spec_utils.SingleBandRasterInput(id='blank_raster_path', band=spec_utils.Input()),
spec_utils.VectorInput(id='blank_vector_path', geometries={}, fields={})
spec.SingleBandRasterInput(id='raster', band=spec.Input()),
spec.VectorInput(id='vector', geometries={}, fields={}),
spec.StringInput(id='not_a_gis_input'),
spec.SingleBandRasterInput(id='blank_raster_path', band=spec.Input()),
spec.VectorInput(id='blank_vector_path', geometries={}, fields={})
],
outputs={},
args_with_spatial_overlap=None)

View File

@ -18,8 +18,8 @@ from osgeo import gdal
from osgeo import ogr
from osgeo import osr
from natcap.invest import spec_utils
from natcap.invest.spec_utils import (
from natcap.invest import spec
from natcap.invest.spec import (
u,
ModelSpec,
UISpec,
@ -274,11 +274,11 @@ class ValidatorTest(unittest.TestCase):
def test_n_workers(self):
"""Validation: validation error returned on invalid n_workers."""
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import validation
args_spec = model_spec_with_defaults(inputs=[
spec_utils.build_input_spec('n_workers', spec_utils.N_WORKERS)])
spec.build_input_spec('n_workers', spec.N_WORKERS)])
@validation.invest_validator
def validate(args, limit_to=None):
@ -295,7 +295,7 @@ class ValidatorTest(unittest.TestCase):
from natcap.invest import validation
# both args and the kwarg should be passed to the function
@spec_utils.timeout
@spec.timeout
def func(arg1, arg2, kwarg=None):
self.assertEqual(kwarg, 'kwarg')
time.sleep(1)
@ -308,7 +308,7 @@ class ValidatorTest(unittest.TestCase):
from natcap.invest import validation
# both args and the kwarg should be passed to the function
@spec_utils.timeout
@spec.timeout
def func(arg):
time.sleep(6)
@ -556,7 +556,7 @@ class RasterValidation(unittest.TestCase):
def test_raster_incorrect_units(self):
"""Validation: test when a raster projection has wrong units."""
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import validation
# Use EPSG:32066 # NAD27 / BLM 16N (in US Survey Feet)
@ -569,7 +569,7 @@ class RasterValidation(unittest.TestCase):
raster = None
error_msg = SingleBandRasterInput(
band=Input(), projected=True, projection_units=spec_utils.u.meter
band=Input(), projected=True, projection_units=spec.u.meter
).validate(filepath)
expected_msg = validation.MESSAGES['WRONG_PROJECTION_UNIT'].format(
unit_a='meter', unit_b='us_survey_foot')
@ -645,7 +645,7 @@ class VectorValidation(unittest.TestCase):
def test_vector_projected_in_m(self):
"""Validation: test that a vector's projection has expected units."""
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import validation
driver = gdal.GetDriverByName('GPKG')
@ -659,14 +659,14 @@ class VectorValidation(unittest.TestCase):
vector = None
error_msg = VectorInput(
fields={}, geometries={'POINT'}, projected=True, projection_units=spec_utils.u.foot
fields={}, geometries={'POINT'}, projected=True, projection_units=spec.u.foot
).validate(filepath)
expected_msg = validation.MESSAGES['WRONG_PROJECTION_UNIT'].format(
unit_a='foot', unit_b='metre')
self.assertEqual(error_msg, expected_msg)
self.assertIsNone(VectorInput(
fields={}, geometries={'POINT'}, projected=True, projection_units=spec_utils.u.meter
fields={}, geometries={'POINT'}, projected=True, projection_units=spec.u.meter
).validate(filepath))
def test_wrong_geom_type(self):
@ -703,7 +703,7 @@ class FreestyleStringValidation(unittest.TestCase):
def test_regexp(self):
"""Validation: test that we can check regex patterns on strings."""
from natcap.invest import validation
from natcap.invest.spec_utils import SUFFIX
from natcap.invest.spec import SUFFIX
self.assertEqual(
None, StringInput(regexp='^1.[0-9]+$').validate(1.234))
@ -926,13 +926,13 @@ class CSVValidation(unittest.TestCase):
# define a side effect for the mock that will sleep
# for longer than the allowed timeout
@spec_utils.timeout
@spec.timeout
def delay(*args, **kwargs):
time.sleep(7)
return []
# replace the validation.check_csv with the mock function, and try to validate
with unittest.mock.patch('natcap.invest.spec_utils.CSVInput.validate', delay):
with unittest.mock.patch('natcap.invest.spec.CSVInput.validate', delay):
with warnings.catch_warnings(record=True) as ws:
# cause all warnings to always be triggered
warnings.simplefilter("always")
@ -945,26 +945,26 @@ class CSVValidation(unittest.TestCase):
from natcap.invest import validation
expected_headers = ['hello', '1']
actual = ['hello', '1', '2']
result = spec_utils.check_headers(expected_headers, actual)
result = spec.check_headers(expected_headers, actual)
self.assertEqual(result, None)
# each pattern should match at least one header
actual = ['1', '2']
result = spec_utils.check_headers(expected_headers, actual)
result = spec.check_headers(expected_headers, actual)
expected_msg = validation.MESSAGES['MATCHED_NO_HEADERS'].format(
header='header', header_name='hello')
self.assertEqual(result, expected_msg)
# duplicate headers that match a pattern are not allowed
actual = ['hello', '1', '1']
result = spec_utils.check_headers(expected_headers, actual, 'column')
result = spec.check_headers(expected_headers, actual, 'column')
expected_msg = validation.MESSAGES['DUPLICATE_HEADER'].format(
header='column', header_name='1', number=2)
self.assertEqual(result, expected_msg)
# duplicate headers that don't match a pattern are allowed
actual = ['hello', '1', 'x', 'x']
result = spec_utils.check_headers(expected_headers, actual)
result = spec.check_headers(expected_headers, actual)
self.assertEqual(result, None)
@ -1745,7 +1745,7 @@ class TestValidationFromSpec(unittest.TestCase):
def test_conditionally_required_vector_fields(self):
"""Validation: conditionally required vector fields."""
from natcap.invest import spec_utils
from natcap.invest import spec
from natcap.invest import validation
spec = model_spec_with_defaults(inputs=[
NumberInput(
@ -1755,7 +1755,7 @@ class TestValidationFromSpec(unittest.TestCase):
),
VectorInput(
id="vector",
geometries=spec_utils.POINTS,
geometries=spec.POINTS,
fields=Fields(
RatioInput(id="field_a"),
RatioInput(id="field_b", required="some_number == 2")
@ -1930,7 +1930,7 @@ class TestValidationFromSpec(unittest.TestCase):
args = {'number_a': 1}
# Patch in a new function that raises an exception
with unittest.mock.patch('natcap.invest.spec_utils.NumberInput.validate',
with unittest.mock.patch('natcap.invest.spec.NumberInput.validate',
Mock(side_effect=ValueError('foo'))):
validation_warnings = validation.validate(args, spec)
@ -2288,7 +2288,7 @@ class TestValidationFromSpec(unittest.TestCase):
Input(id='c', required='conditional statement'),
Input(id='d', required=False)
)
patterns = spec_utils.get_headers_to_validate(spec)
patterns = spec.get_headers_to_validate(spec)
# should only get the patterns that are static and always required
self.assertEqual(sorted(patterns), ['a'])