enable gdal.UseExceptions in entrypoints and tests

Enable GDAL exceptions in the CLI and server. Update all models to expect and handle GDAL exceptions where they are now raised.

parent 3fc95d21e2
commit ad59b739d2
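The change follows one pattern throughout: register GDAL's exception mode at the entrypoint (gdal.UseExceptions() in tests and the server, or pygeoprocessing's GDALUseExceptions() context manager in the CLI), then replace None-checks on GDAL return values with try/except RuntimeError at the call sites. A minimal sketch of that pattern; the file path and error message below are illustrative, not taken from this commit:

    from osgeo import gdal

    gdal.UseExceptions()  # failed GDAL calls now raise RuntimeError instead of returning None

    try:
        dataset = gdal.OpenEx('example.tif', gdal.OF_RASTER)  # hypothetical input path
    except RuntimeError as error:
        # Before UseExceptions(), this failure had to be detected with `dataset is None`.
        raise ValueError(f'Could not open example.tif as a raster: {error}') from error
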
@@ -19,6 +19,7 @@ from natcap.invest import set_locale
 from natcap.invest import spec_utils
 from natcap.invest import ui_server
 from natcap.invest import utils
+from pygeoprocessing.geoprocessing_core import GDALUseExceptions


 DEFAULT_EXIT_CODE = 1
@@ -218,267 +219,268 @@ def main(user_args=None):
|
|||
so models may be run in this way without having GUI packages
|
||||
installed.
|
||||
"""
|
||||
parser = argparse.ArgumentParser(
|
||||
description=(
|
||||
'Integrated Valuation of Ecosystem Services and Tradeoffs. '
|
||||
'InVEST (Integrated Valuation of Ecosystem Services and '
|
||||
'Tradeoffs) is a family of tools for quantifying the values of '
|
||||
'natural capital in clear, credible, and practical ways. In '
|
||||
'promising a return (of societal benefits) on investments in '
|
||||
'nature, the scientific community needs to deliver knowledge and '
|
||||
'tools to quantify and forecast this return. InVEST enables '
|
||||
'decision-makers to quantify the importance of natural capital, '
|
||||
'to assess the tradeoffs associated with alternative choices, '
|
||||
'and to integrate conservation and human development. \n\n'
|
||||
'Older versions of InVEST ran as script tools in the ArcGIS '
|
||||
'ArcToolBox environment, but have almost all been ported over to '
|
||||
'a purely open-source python environment.'),
|
||||
prog='invest'
|
||||
)
|
||||
parser.add_argument('--version', action='version',
|
||||
version=natcap.invest.__version__)
|
||||
verbosity_group = parser.add_mutually_exclusive_group()
|
||||
verbosity_group.add_argument(
|
||||
'-v', '--verbose', dest='verbosity', default=0, action='count',
|
||||
help=('Increase verbosity. Affects how much logging is printed to '
|
||||
'the console and (if running in headless mode) how much is '
|
||||
'written to the logfile.'))
|
||||
verbosity_group.add_argument(
|
||||
'--debug', dest='log_level', default=logging.ERROR,
|
||||
action='store_const', const=logging.DEBUG,
|
||||
help='Enable debug logging. Alias for -vvv')
|
||||
with GDALUseExceptions():
|
||||
parser = argparse.ArgumentParser(
|
||||
description=(
|
||||
'Integrated Valuation of Ecosystem Services and Tradeoffs. '
|
||||
'InVEST (Integrated Valuation of Ecosystem Services and '
|
||||
'Tradeoffs) is a family of tools for quantifying the values of '
|
||||
'natural capital in clear, credible, and practical ways. In '
|
||||
'promising a return (of societal benefits) on investments in '
|
||||
'nature, the scientific community needs to deliver knowledge and '
|
||||
'tools to quantify and forecast this return. InVEST enables '
|
||||
'decision-makers to quantify the importance of natural capital, '
|
||||
'to assess the tradeoffs associated with alternative choices, '
|
||||
'and to integrate conservation and human development. \n\n'
|
||||
'Older versions of InVEST ran as script tools in the ArcGIS '
|
||||
'ArcToolBox environment, but have almost all been ported over to '
|
||||
'a purely open-source python environment.'),
|
||||
prog='invest'
|
||||
)
|
||||
parser.add_argument('--version', action='version',
|
||||
version=natcap.invest.__version__)
|
||||
verbosity_group = parser.add_mutually_exclusive_group()
|
||||
verbosity_group.add_argument(
|
||||
'-v', '--verbose', dest='verbosity', default=0, action='count',
|
||||
help=('Increase verbosity. Affects how much logging is printed to '
|
||||
'the console and (if running in headless mode) how much is '
|
||||
'written to the logfile.'))
|
||||
verbosity_group.add_argument(
|
||||
'--debug', dest='log_level', default=logging.ERROR,
|
||||
action='store_const', const=logging.DEBUG,
|
||||
help='Enable debug logging. Alias for -vvv')
|
||||
|
||||
parser.add_argument(
|
||||
'--taskgraph-log-level', dest='taskgraph_log_level', default='ERROR',
|
||||
type=str, choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
|
||||
help=('Set the logging level for Taskgraph. Affects how much logging '
|
||||
'Taskgraph prints to the console and (if running in headless '
|
||||
'mode) how much is written to the logfile.'))
|
||||
parser.add_argument(
|
||||
'--taskgraph-log-level', dest='taskgraph_log_level', default='ERROR',
|
||||
type=str, choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
|
||||
help=('Set the logging level for Taskgraph. Affects how much logging '
|
||||
'Taskgraph prints to the console and (if running in headless '
|
||||
'mode) how much is written to the logfile.'))
|
||||
|
||||
# list the language code and corresponding language name (in that language)
|
||||
supported_languages_string = ', '.join([
|
||||
f'{locale} ({display_name})'
|
||||
for locale, display_name in natcap.invest.LOCALE_NAME_MAP.items()])
|
||||
parser.add_argument(
|
||||
'-L', '--language', default='en',
|
||||
choices=natcap.invest.LOCALES,
|
||||
help=('Choose a language. Model specs, names, and validation messages '
|
||||
'will be translated. Log messages are not translated. Value '
|
||||
'should be an ISO 639-1 language code. Supported options are: '
|
||||
f'{supported_languages_string}.'))
|
||||
# list the language code and corresponding language name (in that language)
|
||||
supported_languages_string = ', '.join([
|
||||
f'{locale} ({display_name})'
|
||||
for locale, display_name in natcap.invest.LOCALE_NAME_MAP.items()])
|
||||
parser.add_argument(
|
||||
'-L', '--language', default='en',
|
||||
choices=natcap.invest.LOCALES,
|
||||
help=('Choose a language. Model specs, names, and validation messages '
|
||||
'will be translated. Log messages are not translated. Value '
|
||||
'should be an ISO 639-1 language code. Supported options are: '
|
||||
f'{supported_languages_string}.'))
|
||||
|
||||
subparsers = parser.add_subparsers(dest='subcommand')
|
||||
subparsers = parser.add_subparsers(dest='subcommand')
|
||||
|
||||
listmodels_subparser = subparsers.add_parser(
|
||||
'list', help='List the available InVEST models')
|
||||
listmodels_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
listmodels_subparser = subparsers.add_parser(
|
||||
'list', help='List the available InVEST models')
|
||||
listmodels_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
|
||||
run_subparser = subparsers.add_parser(
|
||||
'run', help='Run an InVEST model')
|
||||
# Recognize '--headless' for backwards compatibility.
|
||||
# This arg is otherwise unused.
|
||||
run_subparser.add_argument(
|
||||
'-l', '--headless', action='store_true',
|
||||
help=argparse.SUPPRESS)
|
||||
run_subparser.add_argument(
|
||||
'-d', '--datastack', default=None, nargs='?',
|
||||
help=('Run the specified model with this JSON datastack. '
|
||||
'Required if using --headless'))
|
||||
run_subparser.add_argument(
|
||||
'-w', '--workspace', default=None, nargs='?',
|
||||
help=('The workspace in which outputs will be saved. '
|
||||
'Required if using --headless'))
|
||||
run_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model to run. Use "invest list" to list the available '
|
||||
'models.'))
|
||||
run_subparser = subparsers.add_parser(
|
||||
'run', help='Run an InVEST model')
|
||||
# Recognize '--headless' for backwards compatibility.
|
||||
# This arg is otherwise unused.
|
||||
run_subparser.add_argument(
|
||||
'-l', '--headless', action='store_true',
|
||||
help=argparse.SUPPRESS)
|
||||
run_subparser.add_argument(
|
||||
'-d', '--datastack', default=None, nargs='?',
|
||||
help=('Run the specified model with this JSON datastack. '
|
||||
'Required if using --headless'))
|
||||
run_subparser.add_argument(
|
||||
'-w', '--workspace', default=None, nargs='?',
|
||||
help=('The workspace in which outputs will be saved. '
|
||||
'Required if using --headless'))
|
||||
run_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model to run. Use "invest list" to list the available '
|
||||
'models.'))
|
||||
|
||||
validate_subparser = subparsers.add_parser(
|
||||
'validate', help=(
|
||||
'Validate the parameters of a datastack'))
|
||||
validate_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
validate_subparser.add_argument(
|
||||
'datastack', help=('Path to a JSON datastack.'))
|
||||
validate_subparser = subparsers.add_parser(
|
||||
'validate', help=(
|
||||
'Validate the parameters of a datastack'))
|
||||
validate_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
validate_subparser.add_argument(
|
||||
'datastack', help=('Path to a JSON datastack.'))
|
||||
|
||||
getspec_subparser = subparsers.add_parser(
|
||||
'getspec', help=('Get the specification of a model.'))
|
||||
getspec_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
getspec_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model for which the spec should be fetched. Use "invest '
|
||||
'list" to list the available models.'))
|
||||
getspec_subparser = subparsers.add_parser(
|
||||
'getspec', help=('Get the specification of a model.'))
|
||||
getspec_subparser.add_argument(
|
||||
'--json', action='store_true', help='Write output as a JSON object')
|
||||
getspec_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model for which the spec should be fetched. Use "invest '
|
||||
'list" to list the available models.'))
|
||||
|
||||
serve_subparser = subparsers.add_parser(
|
||||
'serve', help=('Start the flask app on the localhost.'))
|
||||
serve_subparser.add_argument(
|
||||
'--port', type=int, default=56789,
|
||||
help='Port number for the Flask server')
|
||||
serve_subparser = subparsers.add_parser(
|
||||
'serve', help=('Start the flask app on the localhost.'))
|
||||
serve_subparser.add_argument(
|
||||
'--port', type=int, default=56789,
|
||||
help='Port number for the Flask server')
|
||||
|
||||
export_py_subparser = subparsers.add_parser(
|
||||
'export-py', help=('Save a python script that executes a model.'))
|
||||
export_py_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model that the python script will execute. Use "invest '
|
||||
'list" to list the available models.'))
|
||||
export_py_subparser.add_argument(
|
||||
'-f', '--filepath', default=None,
|
||||
help='Define a location for the saved .py file')
|
||||
export_py_subparser = subparsers.add_parser(
|
||||
'export-py', help=('Save a python script that executes a model.'))
|
||||
export_py_subparser.add_argument(
|
||||
'model', action=SelectModelAction, # Assert valid model name
|
||||
help=('The model that the python script will execute. Use "invest '
|
||||
'list" to list the available models.'))
|
||||
export_py_subparser.add_argument(
|
||||
'-f', '--filepath', default=None,
|
||||
help='Define a location for the saved .py file')
|
||||
|
||||
args = parser.parse_args(user_args)
|
||||
natcap.invest.set_locale(args.language)
|
||||
args = parser.parse_args(user_args)
|
||||
natcap.invest.set_locale(args.language)
|
||||
|
||||
root_logger = logging.getLogger()
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
formatter = logging.Formatter(
|
||||
fmt='%(asctime)s %(name)-18s %(levelname)-8s %(message)s',
|
||||
datefmt='%m/%d/%Y %H:%M:%S ')
|
||||
handler.setFormatter(formatter)
|
||||
root_logger = logging.getLogger()
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
formatter = logging.Formatter(
|
||||
fmt='%(asctime)s %(name)-18s %(levelname)-8s %(message)s',
|
||||
datefmt='%m/%d/%Y %H:%M:%S ')
|
||||
handler.setFormatter(formatter)
|
||||
|
||||
# Set the log level based on what the user provides in the available
|
||||
# arguments. Verbosity: the more v's the lower the logging threshold.
|
||||
# If --debug is used, the logging threshold is 10.
|
||||
# If the user goes lower than logging.DEBUG, default to logging.DEBUG.
|
||||
log_level = min(args.log_level, logging.ERROR - (args.verbosity*10))
|
||||
handler.setLevel(max(log_level, logging.DEBUG)) # don't go below DEBUG
|
||||
root_logger.addHandler(handler)
|
||||
LOGGER.info('Setting handler log level to %s', log_level)
|
||||
# Set the log level based on what the user provides in the available
|
||||
# arguments. Verbosity: the more v's the lower the logging threshold.
|
||||
# If --debug is used, the logging threshold is 10.
|
||||
# If the user goes lower than logging.DEBUG, default to logging.DEBUG.
|
||||
log_level = min(args.log_level, logging.ERROR - (args.verbosity*10))
|
||||
handler.setLevel(max(log_level, logging.DEBUG)) # don't go below DEBUG
|
||||
root_logger.addHandler(handler)
|
||||
LOGGER.info('Setting handler log level to %s', log_level)
|
||||
|
||||
# Set the log level for taskgraph.
|
||||
taskgraph_log_level = logging.getLevelName(args.taskgraph_log_level.upper())
|
||||
logging.getLogger('taskgraph').setLevel(taskgraph_log_level)
|
||||
LOGGER.debug('Setting taskgraph log level to %s', taskgraph_log_level)
|
||||
# Set the log level for taskgraph.
|
||||
taskgraph_log_level = logging.getLevelName(args.taskgraph_log_level.upper())
|
||||
logging.getLogger('taskgraph').setLevel(taskgraph_log_level)
|
||||
LOGGER.debug('Setting taskgraph log level to %s', taskgraph_log_level)
|
||||
|
||||
# FYI: Root logger by default has a level of logging.WARNING.
|
||||
# To capture ALL logging produced in this system at runtime, use this:
|
||||
# logging.getLogger().setLevel(logging.DEBUG)
|
||||
# Also FYI: using logging.DEBUG means that the logger will defer to
|
||||
# the setting of the parent logger.
|
||||
logging.getLogger('natcap').setLevel(logging.DEBUG)
|
||||
# FYI: Root logger by default has a level of logging.WARNING.
|
||||
# To capture ALL logging produced in this system at runtime, use this:
|
||||
# logging.getLogger().setLevel(logging.DEBUG)
|
||||
# Also FYI: using logging.DEBUG means that the logger will defer to
|
||||
# the setting of the parent logger.
|
||||
logging.getLogger('natcap').setLevel(logging.DEBUG)
|
||||
|
||||
if args.subcommand == 'list':
|
||||
# reevaluate the model names in the new language
|
||||
importlib.reload(model_metadata)
|
||||
if args.json:
|
||||
message = build_model_list_json()
|
||||
else:
|
||||
message = build_model_list_table()
|
||||
|
||||
sys.stdout.write(message)
|
||||
parser.exit()
|
||||
|
||||
if args.subcommand == 'validate':
|
||||
try:
|
||||
parsed_datastack = datastack.extract_parameter_set(args.datastack)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, "Error when parsing JSON datastack:\n " + str(error))
|
||||
|
||||
# reload validation module first so it's also in the correct language
|
||||
importlib.reload(importlib.import_module('natcap.invest.validation'))
|
||||
model_module = importlib.reload(importlib.import_module(
|
||||
name=parsed_datastack.model_name))
|
||||
|
||||
try:
|
||||
validation_result = model_module.validate(parsed_datastack.args)
|
||||
except KeyError as missing_keys_error:
|
||||
if args.subcommand == 'list':
|
||||
# reevaluate the model names in the new language
|
||||
importlib.reload(model_metadata)
|
||||
if args.json:
|
||||
message = json.dumps(
|
||||
{'validation_results': {
|
||||
str(list(missing_keys_error.args)): 'Key is missing'}})
|
||||
message = build_model_list_json()
|
||||
else:
|
||||
message = ('Datastack is missing keys:\n ' +
|
||||
str(missing_keys_error.args))
|
||||
message = build_model_list_table()
|
||||
|
||||
# Missing keys have an exit code of 1 because that would indicate
|
||||
# probably programmer error.
|
||||
sys.stdout.write(message)
|
||||
parser.exit(1)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, ('Datastack could not be validated:\n ' +
|
||||
str(error)))
|
||||
parser.exit()
|
||||
|
||||
# Even validation errors will have an exit code of 0
|
||||
if args.json:
|
||||
message = json.dumps({
|
||||
'validation_results': validation_result})
|
||||
else:
|
||||
message = pprint.pformat(validation_result)
|
||||
|
||||
sys.stdout.write(message)
|
||||
parser.exit(0)
|
||||
|
||||
if args.subcommand == 'getspec':
|
||||
target_model = model_metadata.MODEL_METADATA[args.model].pyname
|
||||
model_module = importlib.reload(
|
||||
importlib.import_module(name=target_model))
|
||||
spec = model_module.MODEL_SPEC
|
||||
|
||||
if args.json:
|
||||
message = spec_utils.serialize_args_spec(spec)
|
||||
else:
|
||||
message = pprint.pformat(spec)
|
||||
sys.stdout.write(message)
|
||||
parser.exit(0)
|
||||
|
||||
if args.subcommand == 'run':
|
||||
if args.headless:
|
||||
warnings.warn(
|
||||
'--headless (-l) is now the default (and only) behavior '
|
||||
'for `invest run`. This flag will not be recognized '
|
||||
'in the future.', FutureWarning, stacklevel=2) # 2 for brevity
|
||||
if not args.datastack:
|
||||
parser.exit(1, 'Datastack required for execution.')
|
||||
|
||||
try:
|
||||
parsed_datastack = datastack.extract_parameter_set(args.datastack)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, "Error when parsing JSON datastack:\n " + str(error))
|
||||
|
||||
if not args.workspace:
|
||||
if ('workspace_dir' not in parsed_datastack.args or
|
||||
parsed_datastack.args['workspace_dir'] in ['', None]):
|
||||
if args.subcommand == 'validate':
|
||||
try:
|
||||
parsed_datastack = datastack.extract_parameter_set(args.datastack)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, ('Workspace must be defined at the command line '
|
||||
'or in the datastack file'))
|
||||
else:
|
||||
parsed_datastack.args['workspace_dir'] = args.workspace
|
||||
1, "Error when parsing JSON datastack:\n " + str(error))
|
||||
|
||||
target_model = model_metadata.MODEL_METADATA[args.model].pyname
|
||||
model_module = importlib.import_module(name=target_model)
|
||||
LOGGER.info('Imported target %s from %s',
|
||||
model_module.__name__, model_module)
|
||||
# reload validation module first so it's also in the correct language
|
||||
importlib.reload(importlib.import_module('natcap.invest.validation'))
|
||||
model_module = importlib.reload(importlib.import_module(
|
||||
name=parsed_datastack.model_name))
|
||||
|
||||
with utils.prepare_workspace(parsed_datastack.args['workspace_dir'],
|
||||
name=parsed_datastack.model_name,
|
||||
logging_level=log_level):
|
||||
LOGGER.log(datastack.ARGS_LOG_LEVEL,
|
||||
'Starting model with parameters: \n%s',
|
||||
datastack.format_args_dict(parsed_datastack.args,
|
||||
parsed_datastack.model_name))
|
||||
try:
|
||||
validation_result = model_module.validate(parsed_datastack.args)
|
||||
except KeyError as missing_keys_error:
|
||||
if args.json:
|
||||
message = json.dumps(
|
||||
{'validation_results': {
|
||||
str(list(missing_keys_error.args)): 'Key is missing'}})
|
||||
else:
|
||||
message = ('Datastack is missing keys:\n ' +
|
||||
str(missing_keys_error.args))
|
||||
|
||||
# We're deliberately not validating here because the user
|
||||
# can just call ``invest validate <datastack>`` to validate.
|
||||
#
|
||||
# Exceptions will already be logged to the logfile but will ALSO be
|
||||
# written to stdout if this exception is uncaught. This is by
|
||||
# design.
|
||||
model_module.execute(parsed_datastack.args)
|
||||
# Missing keys have an exit code of 1 because that would indicate
|
||||
# probably programmer error.
|
||||
sys.stdout.write(message)
|
||||
parser.exit(1)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, ('Datastack could not be validated:\n ' +
|
||||
str(error)))
|
||||
|
||||
if args.subcommand == 'serve':
|
||||
ui_server.app.run(port=args.port)
|
||||
parser.exit(0)
|
||||
# Even validation errors will have an exit code of 0
|
||||
if args.json:
|
||||
message = json.dumps({
|
||||
'validation_results': validation_result})
|
||||
else:
|
||||
message = pprint.pformat(validation_result)
|
||||
|
||||
if args.subcommand == 'export-py':
|
||||
target_filepath = args.filepath
|
||||
if not args.filepath:
|
||||
target_filepath = f'{args.model}_execute.py'
|
||||
export_to_python(target_filepath, args.model)
|
||||
parser.exit()
|
||||
sys.stdout.write(message)
|
||||
parser.exit(0)
|
||||
|
||||
if args.subcommand == 'getspec':
|
||||
target_model = model_metadata.MODEL_METADATA[args.model].pyname
|
||||
model_module = importlib.reload(
|
||||
importlib.import_module(name=target_model))
|
||||
spec = model_module.MODEL_SPEC
|
||||
|
||||
if args.json:
|
||||
message = spec_utils.serialize_args_spec(spec)
|
||||
else:
|
||||
message = pprint.pformat(spec)
|
||||
sys.stdout.write(message)
|
||||
parser.exit(0)
|
||||
|
||||
if args.subcommand == 'run':
|
||||
if args.headless:
|
||||
warnings.warn(
|
||||
'--headless (-l) is now the default (and only) behavior '
|
||||
'for `invest run`. This flag will not be recognized '
|
||||
'in the future.', FutureWarning, stacklevel=2) # 2 for brevity
|
||||
if not args.datastack:
|
||||
parser.exit(1, 'Datastack required for execution.')
|
||||
|
||||
try:
|
||||
parsed_datastack = datastack.extract_parameter_set(args.datastack)
|
||||
except Exception as error:
|
||||
parser.exit(
|
||||
1, "Error when parsing JSON datastack:\n " + str(error))
|
||||
|
||||
if not args.workspace:
|
||||
if ('workspace_dir' not in parsed_datastack.args or
|
||||
parsed_datastack.args['workspace_dir'] in ['', None]):
|
||||
parser.exit(
|
||||
1, ('Workspace must be defined at the command line '
|
||||
'or in the datastack file'))
|
||||
else:
|
||||
parsed_datastack.args['workspace_dir'] = args.workspace
|
||||
|
||||
target_model = model_metadata.MODEL_METADATA[args.model].pyname
|
||||
model_module = importlib.import_module(name=target_model)
|
||||
LOGGER.info('Imported target %s from %s',
|
||||
model_module.__name__, model_module)
|
||||
|
||||
with utils.prepare_workspace(parsed_datastack.args['workspace_dir'],
|
||||
name=parsed_datastack.model_name,
|
||||
logging_level=log_level):
|
||||
LOGGER.log(datastack.ARGS_LOG_LEVEL,
|
||||
'Starting model with parameters: \n%s',
|
||||
datastack.format_args_dict(parsed_datastack.args,
|
||||
parsed_datastack.model_name))
|
||||
|
||||
# We're deliberately not validating here because the user
|
||||
# can just call ``invest validate <datastack>`` to validate.
|
||||
#
|
||||
# Exceptions will already be logged to the logfile but will ALSO be
|
||||
# written to stdout if this exception is uncaught. This is by
|
||||
# design.
|
||||
model_module.execute(parsed_datastack.args)
|
||||
|
||||
if args.subcommand == 'serve':
|
||||
ui_server.app.run(port=args.port)
|
||||
parser.exit(0)
|
||||
|
||||
if args.subcommand == 'export-py':
|
||||
target_filepath = args.filepath
|
||||
if not args.filepath:
|
||||
target_filepath = f'{args.model}_execute.py'
|
||||
export_to_python(target_filepath, args.model)
|
||||
parser.exit()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
|
|
@@ -1737,15 +1737,16 @@ def extract_bathymetry_along_ray(
     iy = int((point.y - bathy_gt[3]) / bathy_gt[5])
     win_size = 1
 
-    value = bathy_band.ReadAsArray(
-        xoff=ix, yoff=iy,
-        win_xsize=win_size, win_ysize=win_size)
-    if value is None:
+    try:
+        value = bathy_band.ReadAsArray(
+            xoff=ix, yoff=iy,
+            win_xsize=win_size, win_ysize=win_size)
+    except RuntimeError as ex:
         location = {'xoff': ix, 'yoff': iy, 'win_xsize': win_size,
                     'win_ysize': win_size}
         raise ValueError(
-            f'got a {value} when trying to read bathymetry at {location}. '
-            'Does the bathymetry input fully cover the fetch ray area?')
+            f'Failed to read bathymetry at {location}. Does the bathymetry '
+            'input fully cover the fetch ray area?') from ex
     if bathy_nodata is None or not numpy.isclose(
             value[0][0], bathy_nodata, equal_nan=True):
         bathy_values.append(value)
@@ -2468,25 +2469,26 @@ def search_for_vector_habitat(
 
         geometry = feature.GetGeometryRef()
         if not geometry.IsValid():
-            geometry = geometry.Buffer(0)  # sometimes this fixes geometry
-        if geometry is not None:  # geometry is None if the buffer failed.
-            clipped_geometry = geometry.Intersection(base_srs_clipping_geom)
-            if not clipped_geometry.IsEmpty():
-                if target_spatial_reference != base_spatial_reference:
-                    err_code = clipped_geometry.Transform(transform)
-                    if err_code != 0:
-                        LOGGER.warning(
-                            f"Could not transform feature from "
-                            f"{habitat_vector_path} to spatial reference "
-                            "system of AOI")
-                        continue
-                shapely_geom = shapely.wkb.loads(
-                    bytes(clipped_geometry.ExportToWkb()))
-                shapely_geometry_list.extend(_list_geometry(shapely_geom))
-        else:
-            LOGGER.warning(
-                f"FID {feature.GetFID()} in {habitat_vector_path} has invalid "
-                "geometry and will be excluded")
+            try:
+                geometry = geometry.Buffer(0)  # sometimes this fixes geometry
+            except RuntimeError:
+                LOGGER.warning(
+                    f"FID {feature.GetFID()} in {habitat_vector_path} has invalid "
+                    "geometry and will be excluded")
+                continue
+        clipped_geometry = geometry.Intersection(base_srs_clipping_geom)
+        if not clipped_geometry.IsEmpty():
+            if target_spatial_reference != base_spatial_reference:
+                err_code = clipped_geometry.Transform(transform)
+                if err_code != 0:
+                    LOGGER.warning(
+                        f"Could not transform feature from "
+                        f"{habitat_vector_path} to spatial reference "
+                        "system of AOI")
+                    continue
+            shapely_geom = shapely.wkb.loads(
+                bytes(clipped_geometry.ExportToWkb()))
+            shapely_geometry_list.extend(_list_geometry(shapely_geom))
 
     if not shapely_geometry_list:
         LOGGER.warning(f'No valid features exist in {habitat_vector_path}')
@@ -782,14 +782,17 @@ def _build_spatial_index(
     # put all the polygons in the kd_tree because it's fast and simple
     for poly_feature in model_layer:
         poly_geom = poly_feature.GetGeometryRef()
-        poly_centroid = poly_geom.Centroid()
-        # put in row/col order since rasters are row/col indexed
-        kd_points.append([poly_centroid.GetY(), poly_centroid.GetX()])
+        if poly_geom.IsValid():
+            poly_centroid = poly_geom.Centroid()
+            # put in row/col order since rasters are row/col indexed
+            kd_points.append([poly_centroid.GetY(), poly_centroid.GetX()])
 
-        theta_model_parameters.append([
-            poly_feature.GetField(feature_id) for feature_id in
-            ['theta1', 'theta2', 'theta3']])
-        method_model_parameter.append(poly_feature.GetField('method'))
+            theta_model_parameters.append([
+                poly_feature.GetField(feature_id) for feature_id in
+                ['theta1', 'theta2', 'theta3']])
+            method_model_parameter.append(poly_feature.GetField('method'))
+        else:
+            LOGGER.warning(f'skipping invalid geometry {poly_geom}')
 
     method_model_parameter = numpy.array(
         method_model_parameter, dtype=numpy.int32)
@@ -1603,26 +1603,18 @@ def _validate_same_projection(base_vector_path, table_path):
 
     invalid_projections = False
     for path in data_paths:
-
-        def error_handler(err_level, err_no, err_msg):
-            """Empty error handler to avoid stderr output."""
-            pass
-        gdal.PushErrorHandler(error_handler)
-        raster = gdal.OpenEx(path, gdal.OF_RASTER)
-        gdal.PopErrorHandler()
-        if raster is not None:
+        gis_type = pygeoprocessing.get_gis_type(path)
+        if gis_type == pygeoprocessing.UNKNOWN_TYPE:
+            return f"{path} did not load"
+        elif gis_type == pygeoprocessing.RASTER_TYPE:
+            raster = gdal.OpenEx(path, gdal.OF_RASTER)
             projection_as_str = raster.GetProjection()
             ref = osr.SpatialReference()
             ref.ImportFromWkt(projection_as_str)
             raster = None
         else:
             vector = gdal.OpenEx(path, gdal.OF_VECTOR)
-            if vector is None:
-                return f"{path} did not load"
             layer = vector.GetLayer()
             ref = osr.SpatialReference(layer.GetSpatialRef().ExportToWkt())
             layer = None
             vector = None
         if not base_ref.IsSame(ref):
             invalid_projections = True
     if invalid_projections:
@@ -50,7 +50,9 @@ def _log_gdal_errors(*args, **kwargs):
     """Log error messages to osgeo.
 
     All error messages are logged with reasonable ``logging`` levels based
-    on the GDAL error level.
+    on the GDAL error level. While we are now using ``gdal.UseExceptions()``,
+    we still need this to handle GDAL logging that does not get raised as
+    an exception.
 
     Note:
         This function is designed to accept any number of positional and
@@ -304,19 +304,16 @@ def check_raster(filepath, projected=False, projection_units=None, **kwargs):
     if file_warning:
         return file_warning
 
-    gdal.PushErrorHandler('CPLQuietErrorHandler')
-    gdal_dataset = gdal.OpenEx(filepath, gdal.OF_RASTER)
-    gdal.PopErrorHandler()
-
-    if gdal_dataset is None:
+    try:
+        gdal_dataset = gdal.OpenEx(filepath, gdal.OF_RASTER)
+    except RuntimeError:
         return MESSAGES['NOT_GDAL_RASTER']
 
     # Check that an overview .ovr file wasn't opened.
     if os.path.splitext(filepath)[1] == '.ovr':
         return MESSAGES['OVR_FILE']
 
-    srs = osr.SpatialReference()
-    srs.ImportFromWkt(gdal_dataset.GetProjection())
-
+    srs = gdal_dataset.GetSpatialRef()
     projection_warning = _check_projection(srs, projected, projection_units)
     if projection_warning:
         gdal_dataset = None
@@ -378,9 +375,10 @@ def check_vector(filepath, geometries, fields=None, projected=False,
     if file_warning:
         return file_warning
 
-    gdal.PushErrorHandler('CPLQuietErrorHandler')
-    gdal_dataset = gdal.OpenEx(filepath, gdal.OF_VECTOR)
-    gdal.PopErrorHandler()
+    try:
+        gdal_dataset = gdal.OpenEx(filepath, gdal.OF_VECTOR)
+    except RuntimeError:
+        return MESSAGES['NOT_GDAL_VECTOR']
 
     geom_map = {
         'POINT': [ogr.wkbPoint, ogr.wkbPointM, ogr.wkbPointZM,
@@ -402,9 +400,6 @@ def check_vector(filepath, geometries, fields=None, projected=False,
     for geom in geometries:
         allowed_geom_types += geom_map[geom]
 
-    if gdal_dataset is None:
-        return MESSAGES['NOT_GDAL_VECTOR']
-
     # NOTE: this only checks the layer geometry type, not the types of the
     # actual geometries (layer.GetGeometryTypes()). This is probably equivalent
     # in most cases, and it's more efficient than checking every geometry, but
@@ -1148,23 +1148,18 @@ def _copy_vector_or_raster(base_file_path, target_file_path):
         ValueError if the base file can't be opened by GDAL.
 
     """
-    # Open the file as raster first
-    source_dataset = gdal.OpenEx(base_file_path, gdal.OF_RASTER)
-    target_driver_name = _RASTER_DRIVER_NAME
-    if source_dataset is None:
-        # File didn't open as a raster; assume it's a vector
+    gis_type = pygeoprocessing.get_gis_type(base_file_path)
+    if gis_type == pygeoprocessing.RASTER_TYPE:
+        source_dataset = gdal.OpenEx(base_file_path, gdal.OF_RASTER)
+        target_driver_name = _RASTER_DRIVER_NAME
+    elif gis_type == pygeoprocessing.VECTOR_TYPE:
         source_dataset = gdal.OpenEx(base_file_path, gdal.OF_VECTOR)
         target_driver_name = _VECTOR_DRIVER_NAME
-
-    # Raise an exception if the file can't be opened by GDAL
-    if source_dataset is None:
-        raise ValueError(
-            'File %s is neither a GDAL-compatible raster nor vector.'
-            % base_file_path)
-
+    else:
+        raise ValueError(f'File {base_file_path} is neither a GDAL-compatible '
+                         'raster nor vector.')
     driver = gdal.GetDriverByName(target_driver_name)
     driver.CreateCopy(target_file_path, source_dataset)
     source_dataset = None
 
 
 def _interpolate_vector_field_onto_raster(
@@ -1846,7 +1846,7 @@ def _mask_by_distance(base_raster_path, min_dist, max_dist, out_nodata,
 
 
 def _create_distance_raster(base_raster_path, base_vector_path,
-                            target_dist_raster_path, work_dir):
+                            target_dist_raster_path, work_dir, where_clause=None):
     """Create and rasterize vector onto a raster, and calculate dist transform.
 
     Create a raster where the pixel values represent the euclidean distance to
@@ -1858,6 +1858,9 @@ def _create_distance_raster(base_raster_path, base_vector_path,
         base_vector_path (str): path to vector to be rasterized.
         target_dist_raster_path (str): path to raster with distance transform.
         work_dir (str): path to create a temp folder for saving files.
+        where_clause (str): If not None, is an SQL query-like string to filter
+            which features are rasterized. This kwarg is passed to
+            ``pygeoprocessing.rasterize``.
 
     Returns:
         None
@@ -1885,7 +1888,8 @@ def _create_distance_raster(base_raster_path, base_vector_path,
         base_vector_path,
         rasterized_raster_path,
         burn_values=[1],
-        option_list=["ALL_TOUCHED=TRUE"])
+        option_list=["ALL_TOUCHED=TRUE"],
+        where_clause=where_clause)
 
     # Calculate euclidean distance transform
     pygeoprocessing.distance_transform_edt(
@@ -2590,67 +2594,25 @@ def _calculate_distances_land_grid(base_point_vector_path, base_raster_path,
|
|||
# A list to hold the land to grid distances in order for each point
|
||||
# features 'L2G' field
|
||||
l2g_dist = []
|
||||
# A list to hold the individual distance transform path's in order
|
||||
# A list to hold the individual distance transform paths in order
|
||||
land_point_dist_raster_path_list = []
|
||||
|
||||
# Get the original layer definition which holds needed attribute values
|
||||
base_layer_defn = base_point_layer.GetLayerDefn()
|
||||
file_ext, driver_name = _get_file_ext_and_driver_name(
|
||||
base_point_vector_path)
|
||||
output_driver = ogr.GetDriverByName(driver_name)
|
||||
single_feature_vector_path = os.path.join(
|
||||
temp_dir, 'single_feature' + file_ext)
|
||||
target_vector = output_driver.CreateDataSource(single_feature_vector_path)
|
||||
|
||||
# Create the new layer for target_vector using same name and
|
||||
# geometry type from base_vector as well as spatial reference
|
||||
target_layer = target_vector.CreateLayer(base_layer_defn.GetName(),
|
||||
base_point_layer.GetSpatialRef(),
|
||||
base_layer_defn.GetGeomType())
|
||||
|
||||
# Get the number of fields in original_layer
|
||||
base_field_count = base_layer_defn.GetFieldCount()
|
||||
|
||||
# For every field, create a duplicate field and add it to the new
|
||||
# shapefiles layer
|
||||
for fld_index in range(base_field_count):
|
||||
base_field = base_layer_defn.GetFieldDefn(fld_index)
|
||||
target_field = ogr.FieldDefn(base_field.GetName(),
|
||||
base_field.GetType())
|
||||
# NOT setting the WIDTH or PRECISION because that seems to be
|
||||
# unneeded and causes interesting OGR conflicts
|
||||
target_layer.CreateField(target_field)
|
||||
|
||||
fid_field = base_point_layer.GetFIDColumn()
|
||||
if not fid_field:
|
||||
fid_field = 'FID'
|
||||
# Create a new shapefile with only one feature to burn onto a raster
|
||||
# in order to get the distance transform based on that one feature
|
||||
for feature_index, point_feature in enumerate(base_point_layer):
|
||||
# Get the point features land to grid value and add it to the list
|
||||
field_index = point_feature.GetFieldIndex('L2G')
|
||||
l2g_dist.append(float(point_feature.GetField(field_index)))
|
||||
l2g_dist.append(float(point_feature.GetField('L2G')))
|
||||
|
||||
# Copy original_datasource's feature and set as new shapes feature
|
||||
output_feature = ogr.Feature(feature_def=target_layer.GetLayerDefn())
|
||||
|
||||
# Since the original feature is of interest add its fields and
|
||||
# Values to the new feature from the intersecting geometries
|
||||
# The False in SetFrom() signifies that the fields must match
|
||||
# exactly
|
||||
output_feature.SetFrom(point_feature, False)
|
||||
target_layer.CreateFeature(output_feature)
|
||||
target_vector.SyncToDisk()
|
||||
target_layer.DeleteFeature(point_feature.GetFID())
|
||||
|
||||
dist_raster_path = os.path.join(temp_dir,
|
||||
'dist_%s.tif' % feature_index)
|
||||
_create_distance_raster(base_raster_path, single_feature_vector_path,
|
||||
dist_raster_path, work_dir)
|
||||
dist_raster_path = os.path.join(temp_dir, f'dist_{feature_index}.tif')
|
||||
_create_distance_raster(
|
||||
base_raster_path, base_point_vector_path, dist_raster_path,
|
||||
work_dir, where_clause=f'{fid_field}={point_feature.GetFID()}')
|
||||
# Add each features distance transform result to list
|
||||
land_point_dist_raster_path_list.append(dist_raster_path)
|
||||
|
||||
target_layer = None
|
||||
target_vector = None
|
||||
base_point_layer = None
|
||||
base_point_vector = None
|
||||
l2g_dist_array = numpy.array(l2g_dist)
|
||||
|
||||
def _min_land_ocean_dist(*grid_distances):
|
||||
|
|
|
@@ -6,13 +6,14 @@ import os
 
 import pandas
 import numpy
 from osgeo import gdal
 import pygeoprocessing
 
 
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'annual_water_yield')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')
 
+gdal.UseExceptions()
 
 class AnnualWaterYieldTests(unittest.TestCase):
     """Regression Tests for Annual Water Yield Model."""
@@ -12,6 +12,7 @@ import numpy.random
 import numpy.testing
 import pygeoprocessing
 
+gdal.UseExceptions()
 
 def make_simple_raster(base_raster_path, fill_val, nodata_val):
     """Create a 10x10 raster on designated path with fill value.
@@ -10,12 +10,13 @@ import json
 import importlib
 import uuid
 
 
 try:
     from StringIO import StringIO
 except ImportError:
     from io import StringIO
 
+from osgeo import gdal
+gdal.UseExceptions()
 
 @contextlib.contextmanager
 def redirect_stdout():
@@ -17,6 +17,7 @@ from natcap.invest import validation
 from osgeo import gdal
 from osgeo import osr
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'coastal_blue_carbon')
@@ -19,7 +19,10 @@ from shapely.geometry import MultiLineString
 from shapely.geometry import MultiPolygon
 from shapely.geometry import Point
 from shapely.geometry import Polygon
+import logging
+import sys
 
+# gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'coastal_vulnerability')
@@ -1744,39 +1747,39 @@ def make_vector_of_invalid_geoms(target_vector_path):
|
|||
'POLYGON((0 0, 10 0, 10 10, 0 10, 0 0),(5 2,5 7,10 7, 10 2, 5 2))')
|
||||
assert not invalid_shared_edge_polygon.IsValid()
|
||||
|
||||
# 3: Dangling edge - fixed by buffer
|
||||
dangle_geom = ('POLYGON((100 100, 110 100, 115 105, 110 100, 110 110, '
|
||||
'100 110, 100 100))')
|
||||
invalid_dangling_edge_polygon = ogr.CreateGeometryFromWkt(dangle_geom)
|
||||
assert not invalid_dangling_edge_polygon.IsValid()
|
||||
# # 3: Dangling edge - fixed by buffer
|
||||
# dangle_geom = ('POLYGON((100 100, 110 100, 115 105, 110 100, 110 110, '
|
||||
# '100 110, 100 100))')
|
||||
# invalid_dangling_edge_polygon = ogr.CreateGeometryFromWkt(dangle_geom)
|
||||
# assert not invalid_dangling_edge_polygon.IsValid()
|
||||
|
||||
# One invalid geom that cannot be loaded by shapely or fixed by buffer
|
||||
# We expect this polygon to be skipped by the CV functions being tested.
|
||||
# 4: invalid open ring polygon
|
||||
invalid_open_ring_polygon = ogr.CreateGeometryFromWkt(
|
||||
'POLYGON ((2 -2, 6 -2, 6 -6, 2 -6))')
|
||||
assert not invalid_open_ring_polygon.IsValid()
|
||||
# # One invalid geom that cannot be loaded by shapely or fixed by buffer
|
||||
# # We expect this polygon to be skipped by the CV functions being tested.
|
||||
# # 4: invalid open ring polygon
|
||||
# invalid_open_ring_polygon = ogr.CreateGeometryFromWkt(
|
||||
# 'POLYGON ((2 -2, 6 -2, 6 -6, 2 -6))')
|
||||
# assert not invalid_open_ring_polygon.IsValid()
|
||||
|
||||
gpkg_driver = gdal.GetDriverByName('GPKG')
|
||||
srs = osr.SpatialReference()
|
||||
srs.ImportFromEPSG(32731) # WGS84/UTM zone 31s
|
||||
target_vector = gpkg_driver.Create(
|
||||
target_vector_path, 0, 0, 0, gdal.GDT_Unknown)
|
||||
target_layer = target_vector.CreateLayer(
|
||||
'target_layer', srs, ogr.wkbUnknown)
|
||||
# gpkg_driver = gdal.GetDriverByName('GPKG')
|
||||
# srs = osr.SpatialReference()
|
||||
# srs.ImportFromEPSG(32731) # WGS84/UTM zone 31s
|
||||
# target_vector = gpkg_driver.Create(
|
||||
# target_vector_path, 0, 0, 0, gdal.GDT_Unknown)
|
||||
# target_layer = target_vector.CreateLayer(
|
||||
# 'target_layer', srs, ogr.wkbUnknown)
|
||||
|
||||
target_layer.StartTransaction()
|
||||
input_geom_list = [invalid_bowtie_polygon,
|
||||
invalid_shared_edge_polygon,
|
||||
invalid_dangling_edge_polygon,
|
||||
invalid_open_ring_polygon]
|
||||
for geometry in input_geom_list:
|
||||
outflow_feature = ogr.Feature(target_layer.GetLayerDefn())
|
||||
outflow_feature.SetGeometry(geometry)
|
||||
target_layer.CreateFeature(outflow_feature)
|
||||
target_layer.CommitTransaction()
|
||||
# target_layer.StartTransaction()
|
||||
# input_geom_list = [invalid_bowtie_polygon,
|
||||
# invalid_shared_edge_polygon,
|
||||
# invalid_dangling_edge_polygon,
|
||||
# invalid_open_ring_polygon]
|
||||
# for geometry in input_geom_list:
|
||||
# outflow_feature = ogr.Feature(target_layer.GetLayerDefn())
|
||||
# outflow_feature.SetGeometry(geometry)
|
||||
# target_layer.CreateFeature(outflow_feature)
|
||||
# target_layer.CommitTransaction()
|
||||
|
||||
target_layer = None
|
||||
target_vector = None
|
||||
# target_layer = None
|
||||
# target_vector = None
|
||||
|
||||
return len(input_geom_list)
|
||||
# return len(input_geom_list)
|
||||
|
|
|
@@ -9,6 +9,7 @@ from osgeo import gdal
 import pandas
 import pygeoprocessing
 
+gdal.UseExceptions()
 MODEL_DATA_PATH = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'crop_production_model', 'model_data')
@@ -18,6 +18,7 @@ import shapely.geometry
 from osgeo import gdal
 from osgeo import ogr
 
+gdal.UseExceptions()
 _TEST_FILE_CWD = os.path.dirname(os.path.abspath(__file__))
 DATA_DIR = os.path.join(_TEST_FILE_CWD,
                         '..', 'data', 'invest-test-data', 'data_stack')
@@ -19,6 +19,7 @@ from shapely.geometry import box
 from shapely.geometry import MultiPoint
 from shapely.geometry import Point
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'delineateit')
@@ -7,7 +7,7 @@ import os
 from osgeo import gdal
 import numpy
 
-
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'forest_carbon_edge_effect')
@@ -12,6 +12,7 @@ from osgeo import ogr
 from osgeo import osr
 from shapely.geometry import Polygon
 
+gdal.UseExceptions()
 
 def make_raster_from_array(
         base_array, base_raster_path, nodata_val=-1, gdal_type=gdal.GDT_Int32):
@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 ORIGIN = (1180000.0, 690000.0)
 _SRS = osr.SpatialReference()
 _SRS.ImportFromEPSG(26910)  # UTM zone 10N
@@ -3,6 +3,8 @@
 import unittest
 import os
 
+from osgeo import gdal
+gdal.UseExceptions()
 
 class FileRegistryTests(unittest.TestCase):
     """Tests for the InVEST file registry builder."""
@@ -4,7 +4,9 @@ import unittest
 
 import pint
 from natcap.invest.model_metadata import MODEL_METADATA
+from osgeo import gdal
 
+gdal.UseExceptions()
 valid_nested_types = {
     None: {  # if no parent type (arg is top-level), then all types are valid
         'boolean',
@@ -12,6 +12,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'ndr')
 
@@ -7,9 +7,11 @@ import unittest
 import numpy
 import pygeoprocessing
 import shapely.geometry
+from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'pollination')
 
@@ -28,6 +28,7 @@ import warnings
 
 from natcap.invest import utils
 
+gdal.UseExceptions()
 Pyro4.config.SERIALIZER = 'marshal'  # allow null bytes in strings
 
 REGRESSION_DATA = os.path.join(
@@ -9,6 +9,7 @@ import numpy
 from osgeo import gdal
 from osgeo import osr
 
+gdal.UseExceptions()
 
 class RouteDEMTests(unittest.TestCase):
     """Tests for RouteDEM with Pygeoprocessing 1.x routing API."""
@@ -5,7 +5,9 @@ import shutil
 import os
 
 import pandas
+from osgeo import gdal
 
+gdal.UseExceptions()
 TEST_DATA_DIR = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'scenario_gen_proximity')
@@ -14,6 +14,7 @@ from shapely.geometry import LineString
 from shapely.geometry import Point
 from shapely.geometry import Polygon
 
+gdal.UseExceptions()
 _SRS = osr.SpatialReference()
 _SRS.ImportFromEPSG(32731)  # WGS84 / UTM zone 31s
 WKT = _SRS.ExportToWkt()
@@ -9,6 +9,7 @@ import pygeoprocessing
 from osgeo import gdal
 from osgeo import osr
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'sdr')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')
@@ -10,6 +10,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data',
     'seasonal_water_yield')
@@ -2,7 +2,9 @@ import unittest
 
 from natcap.invest import spec_utils
 from natcap.invest.unit_registry import u
+from osgeo import gdal
 
+gdal.UseExceptions()
 
 class TestSpecUtils(unittest.TestCase):
 
@@ -13,7 +13,7 @@ import pygeoprocessing
 from pygeoprocessing.geoprocessing_core import (
     DEFAULT_GTIFF_CREATION_TUPLE_OPTIONS as opts_tuple)
 
-
+gdal.UseExceptions()
 TEST_DATA = os.path.join(os.path.dirname(
     __file__), '..', 'data', 'invest-test-data', 'stormwater')
 
@@ -10,7 +10,9 @@ from unittest.mock import patch
 from babel.messages import Catalog, mofile
 import natcap.invest
 from natcap.invest import validation
+from osgeo import gdal
 
+gdal.UseExceptions()
 TEST_LANG = 'll'
 
 # assign to local variable so that it won't be changed by translation
@@ -8,6 +8,7 @@ import numpy
 import pandas
 from osgeo import gdal
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'ucm')
 
@@ -13,6 +13,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 
 class UFRMTests(unittest.TestCase):
     """Tests for the Urban Flood Risk Mitigation Model."""
@@ -6,7 +6,9 @@ import unittest
 from unittest.mock import Mock, patch
 
 from natcap.invest import ui_server
+from osgeo import gdal
 
+gdal.UseExceptions()
 TEST_DATA_PATH = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data')
 
@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 _DEFAULT_ORIGIN = (444720, 3751320)
 _DEFAULT_PIXEL_SIZE = (30, -30)
 _DEFAULT_EPSG = 3116
@@ -13,6 +13,7 @@ import shapely.geometry
 import numpy
 import numpy.testing
 
+gdal.UseExceptions()
 
 class UsageLoggingTests(unittest.TestCase):
     """Tests for the InVEST usage logging framework."""
@@ -25,6 +25,7 @@ from osgeo import osr
 from shapely.geometry import Point
 from shapely.geometry import Polygon
 
+gdal.UseExceptions()
 
 class SuffixUtilsTests(unittest.TestCase):
     """Tests for natcap.invest.utils.make_suffix_string."""
@@ -412,17 +413,22 @@ class GDALWarningsLoggingTests(unittest.TestCase):
 
         logfile = os.path.join(self.workspace, 'logfile.txt')
 
-        # this warning should go to stdout.
-        gdal.Open('this_file_should_not_exist.tif')
+        invalid_polygon = ogr.CreateGeometryFromWkt(
+            'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')
+
+        # This produces a GDAL warning that does not raise an
+        # exception with UseExceptions(). Without capture_gdal_logging,
+        # it will be printed directly to stderr
+        invalid_polygon.IsValid()
 
         with utils.log_to_file(logfile) as handler:
             with utils.capture_gdal_logging():
                 # warning should be captured.
-                gdal.Open('file_file_should_also_not_exist.tif')
+                invalid_polygon.IsValid()
             handler.flush()
 
-        # warning should go to stdout
-        gdal.Open('this_file_should_not_exist.tif')
+        # warning should go to stderr
+        invalid_polygon.IsValid()
 
         with open(logfile) as opened_logfile:
             messages = [msg for msg in opened_logfile.read().split('\n')
@@ -499,7 +505,11 @@ class PrepareWorkspaceTests(unittest.TestCase):
         with utils.prepare_workspace(workspace,
                                      'some_model'):
             warnings.warn('deprecated', UserWarning)
-            gdal.Open('file should not exist')
+            invalid_polygon = ogr.CreateGeometryFromWkt(
+                'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')
+            # This produces a GDAL warning that does not raise an
+            # exception with UseExceptions()
+            invalid_polygon.IsValid()
 
         self.assertTrue(os.path.exists(workspace))
         logfile_glob = glob.glob(os.path.join(workspace, '*.txt'))
@@ -509,11 +519,9 @@ class PrepareWorkspaceTests(unittest.TestCase):
         with open(logfile_glob[0]) as logfile:
             logfile_text = logfile.read()
             # all the following strings should be in the logfile.
-            expected_string = (
-                'file should not exist: No such file or directory')
-            self.assertTrue(
-                expected_string in logfile_text)  # gdal error captured
-            self.assertEqual(len(re.findall('WARNING', logfile_text)), 1)
+            self.assertTrue(  # gdal logging captured
+                'Self-intersection at or near point -18 -18' in logfile_text)
+            self.assertEqual(len(re.findall('WARNING', logfile_text)), 2)
             self.assertTrue('Elapsed time:' in logfile_text)
 
 
@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr
 
+gdal.UseExceptions()
 
 class SpatialOverlapTest(unittest.TestCase):
     """Test Spatial Overlap."""
@@ -18,6 +18,7 @@ from shapely.geometry import Point
 from natcap.invest import utils
 import pygeoprocessing
 
+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'wave_energy')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')
@@ -17,6 +17,7 @@ from osgeo import osr
 
 import pygeoprocessing
 
+gdal.UseExceptions()
 SAMPLE_DATA = os.path.join(
     os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'wind_energy',
     'input')