commit 3c9b795d06
Merge branch 'feature/routing-refactor' into feature/routing-refactor-compare
@@ -53,7 +53,24 @@ jobs:
           perl -0777 -i -pe \
             "s/Unreleased Changes\n------------------/..\n Unreleased Changes\n ------------------\n\n${HEADER}\n${UNDERLINE}/g" \
             HISTORY.rst
+
+      - name: Install dependencies
+        run: pip install rst2html5
+
+      - name: Generate changelog.html
+        run: rst2html5 HISTORY.rst workbench/changelog.html
+
+      - name: Update package.json version
+        uses: BellCubeDev/update-package-version-by-release-tag@v2
+        with:
+          version: ${{ inputs.version }}
+          package-json-path: workbench/package.json
+
+      - name: Commit updated HISTORY.rst, changelog.html, and package.json
+        run: |
           git add HISTORY.rst
+          git add workbench/changelog.html
+          git add workbench/package.json
           git commit -m "Committing the $VERSION release."

       - name: Tag and push
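For reference, the `perl` one-liner above rewrites the top of HISTORY.rst at release time: it demotes the "Unreleased Changes" section behind an RST comment and inserts the new release header. A rough Python equivalent, with a hypothetical release header standing in for the real `HEADER`/`UNDERLINE` values:

```python
# Rough Python equivalent of the perl substitution in the workflow step above.
# HEADER is a hypothetical stand-in for the real release title.
HEADER = '3.14.2 (2024-05-29)'
UNDERLINE = '-' * len(HEADER)

with open('HISTORY.rst') as fh:
    history = fh.read()

# Comment out "Unreleased Changes" and insert the new release header.
history = history.replace(
    'Unreleased Changes\n------------------',
    f'..\n Unreleased Changes\n ------------------\n\n{HEADER}\n{UNDERLINE}')

with open('HISTORY.rst', 'w') as fh:
    fh.write(history)
```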
@@ -99,7 +99,7 @@ jobs:
           twine upload \
             --username="__token__" \
             --password=${{ secrets.PYPI_NATCAP_INVEST_TOKEN }} \
-            artifacts/natcap.invest*
+            artifacts/natcap.invest* artifacts/natcap_invest*

       - name: Roll back on failure
         if: failure()
HISTORY.rst  (20 lines changed)
@@ -48,6 +48,12 @@ Unreleased Changes
     reflect changes in how InVEST is installed on modern systems, and also to
     include images of the InVEST workbench instead of just broken links.
     https://github.com/natcap/invest/issues/1660
+  * Updated translations for Spanish and Chinese
+  * natcap.invest now works with (and requires) ``gdal.UseExceptions``. A
+    ``FutureWarning`` is raised on import if GDAL exceptions are not enabled.
+  * Fixed an issue on Windows where GDAL fails to find its DLLs due to
+    an interfering GDAL installation on the PATH, such as from anaconda.
+    https://github.com/natcap/invest/issues/1643
 * Workbench
   * Several small updates to the model input form UI to improve usability
     and visual consistency (https://github.com/natcap/invest/issues/912).
@@ -61,6 +67,8 @@ Unreleased Changes
     (https://github.com/natcap/invest/issues/1609).
   * Improved error handling when a datastack cannot be saved with relative
     paths across drives (https://github.com/natcap/invest/issues/1608).
+  * The InVEST changelog now displays in the Workbench the first time a new
+    version is launched (https://github.com/natcap/invest/issues/1368).
 * Coastal Vulnerability
   * Fixed a regression where an AOI with multiple features could raise a
     TypeError after intersecting with the landmass polygon.
@@ -73,6 +81,18 @@ Unreleased Changes
     (https://github.com/natcap/invest/issues/1615).
   * Rarity values are now output in CSV format (as well as in raster format)
     (https://github.com/natcap/invest/issues/721).
+* Pollination
+  * Fixed an issue with nodata handling that was causing some outputs to be
+    filled either with the float32 value for positive infinity, or else with
+    a value very close to it. https://github.com/natcap/invest/issues/1635
+  * While working on https://github.com/natcap/invest/issues/1635, we also
+    updated the stated dtype of most pollination model outputs to be float32
+    instead of the float64 dtype that was being assumed previously. This
+    will result in smaller output filesizes with minimal loss of precision.
+* Seasonal Water Yield
+  * Added support for zero padding in month numbers in ET and precipitation
+    file names (i.e., users can now name their file Precip_01.tif).
+    (https://github.com/natcap/invest/issues/1166)
 * Urban Flood Risk
   * Fields present on the input AOI vector are now retained in the output.
     (https://github.com/natcap/invest/issues/1600)
Makefile  (9 lines changed)
@@ -10,7 +10,7 @@ GIT_TEST_DATA_REPO_REV := 324abde73e1d770ad75921466ecafd1ec6297752

 GIT_UG_REPO := https://github.com/natcap/invest.users-guide
 GIT_UG_REPO_PATH := doc/users-guide
-GIT_UG_REPO_REV := f203ec069f9f03560c9a85b268e67ebb6b994953
+GIT_UG_REPO_REV := 5ee3616d4549baf3b1e44e0fcef485145389e29a

 ENV = "./env"
 ifeq ($(OS),Windows_NT)
@@ -66,6 +66,7 @@ PYTHON_ARCH := $(shell $(PYTHON) -c "import sys; print('x86' if sys.maxsize <= 2

 GSUTIL := gsutil
 SIGNTOOL := SignTool
+RST2HTML5 := rst2html5

 # local directory names
 DIST_DIR := dist
@@ -73,6 +74,8 @@ DIST_DATA_DIR := $(DIST_DIR)/data
 BUILD_DIR := build
 WORKBENCH := workbench
 WORKBENCH_DIST_DIR := $(WORKBENCH)/dist
+CHANGELOG_SRC := HISTORY.rst
+CHANGELOG_DEST := $(WORKBENCH)/changelog.html

 # The fork name and user here are derived from the git path on github.
 # The fork name will need to be set manually (e.g. make FORKNAME=natcap/invest)
@@ -141,6 +144,7 @@ help:
	@echo "  binaries          to build pyinstaller binaries"
	@echo "  apidocs           to build HTML API documentation"
	@echo "  userguide         to build HTML version of the users guide"
+	@echo "  changelog         to build HTML version of the changelog"
	@echo "  python_packages   to build natcap.invest wheel and source distributions"
	@echo "  codesign_mac      to sign the mac disk image using the codesign utility"
	@echo "  codesign_windows  to sign the windows installer using the SignTool utility"
@@ -366,6 +370,9 @@ deploy:
	@echo "Application binaries (if they were created) can be downloaded from:"
	@echo "  * $(DOWNLOAD_DIR_URL)"

+changelog:
+	$(RST2HTML5) $(CHANGELOG_SRC) $(CHANGELOG_DEST)
+
 # Notes on Makefile development
 #
 # * Use the -drR to show the decision tree (and none of the implicit rules)
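The new `changelog` target shells out to the `rst2html5` CLI installed above. The same conversion can be sketched in Python with docutils, which the CLI wraps; treat this as a rough equivalent rather than what the Makefile actually runs:

```python
# A minimal sketch, assuming the docutils 'html5' writer, of the conversion
# performed by `make changelog`. Paths come from the Makefile variables above.
from docutils.core import publish_file

publish_file(
    source_path='HISTORY.rst',                    # CHANGELOG_SRC
    destination_path='workbench/changelog.html',  # CHANGELOG_DEST
    writer_name='html5')
```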
README.rst  (86 lines changed)
@@ -35,7 +35,6 @@ General Information
 Dependencies
 ------------

-Run ``make check`` to test if all required dependencies are installed on your system.
 OS-specific installation instructions are found either online at
 http://invest.readthedocs.io/en/latest/installing.html or locally at ``doc/api-docs/installing.rst``.

@@ -57,30 +56,50 @@ Or on Windows, use the following instead from a CMD prompt::

     > make env
     > .\env\bin\activate

-This makefile target is included for convenience ... you may of course choose to
-manage your own virtual environment. ``requirements.txt``,
-``requirements-dev.txt`` and ``requirements-docs.txt`` list the python
-dependencies needed.
+This makefile target is included for convenience. It uses ``conda`` and installs packages from ``conda-forge``.
+It also uses the `-p` flag with `conda create`, creating a `./env` folder containing the environment.

-Using a different environment name
-""""""""""""""""""""""""""""""""""
-If you prefer a different name for your environment, you may pass the environment name as
+Using a different environment folder name
+"""""""""""""""""""""""""""""""""""""""""
+If you prefer a different path for your environment, you may pass the environment path as
 a parameter to make::

     $ make ENV=myEnv env

-You could then activate the environment created at ``myEnv``.
+You could then activate the environment created at ``./myEnv``.


 Using a different environment management tool
 """""""""""""""""""""""""""""""""""""""""""""
-The InVEST Makefile uses ``virtualenv`` to set up an environment, but this is
-not the only `environment management tool out there
-<https://packaging.python.org/tutorials/installing-packages/#creating-virtual-environments>`_.
-You may elect to manage your virtual environment a different way, independent
-of ``make env``. The only requirement for the build process is that the required
-tools are available on your PATH and the required python packages can be imported.
+You may of course choose to manage your own virtual environment without using the Makefile.

+We suggest using ``conda`` or ``mamba`` and ``conda-forge``.
+
+``requirements.txt``, ``requirements-dev.txt`` and ``requirements-docs.txt`` list the python
+dependencies needed.
+
+Installing ``natcap.invest`` from local source code
+"""""""""""""""""""""""""""""""""""""""""""""""""""
+From an activated virtual environment, it's safest to uninstall any existing installation
+and then install `natcap.invest`::
+
+    $ pip uninstall natcap.invest
+    $ make install
+
+In practice, it can be convenient to use an "editable install" instead to avoid needing
+to uninstall & re-install after making changes to source code::
+
+    $ pip install -e .
+
+Note that with an editable install any changes to non-Python (Cython, C++) files will
+require compilation using one of the above installation methods.
+
+*The Workbench is not part of the* ``natcap.invest`` *Python package. See*
+``workbench/readme.md`` *for developer details.*
+
+A successful ``natcap.invest`` installation will include the InVEST CLI::
+
+    $ invest list

 Building InVEST Distributions
 -----------------------------
@@ -150,6 +169,13 @@ To build the user's guide::
 This will build HTML and PDF documentation, writing them to ``dist/userguide``
 and ``dist/InVEST_*_Documentation.pdf``, respectively.

+The User's Guide is maintained in a separate git repository. InVEST will build
+the User's Guide with the commit defined in the ``Makefile``::
+
+    GIT_UG_REPO := https://github.com/natcap/invest.users-guide
+    GIT_UG_REPO_PATH := doc/users-guide
+    GIT_UG_REPO_REV := f203ec069f9f03560c9a85b268e67ebb6b994953
+

 API Documentation
 +++++++++++++++++
@@ -173,22 +199,12 @@ build zip archives of the sample data::

 This will write the data zipfiles to ``dist/data``. ``git`` command is needed.

-Single archive of sample data
-+++++++++++++++++++++++++++++
-
-For trainings, it is especially convenient to distribute all sample data as a
-single zip archive. As an added bonus, this single zip archive can be provided
-to the InVEST installer for Windows as either the 'Advanced' input on the front
-page of the installer, or by a CLI flag, thus preventing the installer from
-downloading datasets from the internet. See
-``installer/windows/invest_installer.nsi`` for more details. To build a single
-archive of all InVEST sample data::
-
-    $ make sampledata_single
-
-This will write the single sampledata archive to
-``dist/InVEST_*_sample_data.zip``.
+Sample data is tracked in a ``git-lfs`` repo and will be packaged based on the commit
+defined in the ``Makefile``::
+
+    GIT_SAMPLE_DATA_REPO := https://bitbucket.org/natcap/invest-sample-data.git
+    GIT_SAMPLE_DATA_REPO_PATH := $(DATA_DIR)/invest-sample-data
+    GIT_SAMPLE_DATA_REPO_REV := 0f8b41557753dad3670ba8220f41650b51435a93

 Tests
 -----
@@ -202,6 +218,16 @@ To run tests on the suite of Ecosystem Service models in InVEST::

     $ make test

+Tests depend on test data that is tracked in a ``git-lfs`` repo defined in the ``Makefile``::
+
+    GIT_TEST_DATA_REPO := https://bitbucket.org/natcap/invest-test-data.git
+    GIT_TEST_DATA_REPO_PATH := $(DATA_DIR)/invest-test-data
+    GIT_TEST_DATA_REPO_REV := 324abde73e1d770ad75921466ecafd1ec6297752
+
+Test data (and Sample Data) can be retrieved using::
+
+    $ make fetch
+

 Changing how GNU make runs tests
 ++++++++++++++++++++++++++++++++
@@ -5,14 +5,17 @@ import sys
 os.environ['PROJ_LIB'] = os.path.join(sys._MEIPASS, 'proj')

 if platform.system() == 'Darwin':
-    # This allows Qt 5.13+ to start on Big Sur.
-    # See https://bugreports.qt.io/browse/QTBUG-87014
-    # and https://github.com/natcap/invest/issues/384
-    os.environ['QT_MAC_WANTS_LAYER'] = '1'
-
     # Rtree will look in this directory first for libspatialindex_c.dylib.
     # In response to issues with github mac binary builds:
     # https://github.com/natcap/invest/issues/594
     # sys._MEIPASS is the path to where the pyinstaller entrypoint bundle
     # lives. See the pyinstaller docs for more details.
     os.environ['SPATIALINDEX_C_LIBRARY'] = sys._MEIPASS
+
+if platform.system() == 'Windows':
+    # sys._MEIPASS contains gdal DLLs. It does not otherwise end
+    # up on the PATH, which means that gdal can discover
+    # incompatible DLLs from some other place on the PATH, such
+    # as an anaconda gdal installation.
+    if 'PATH' in os.environ:
+        os.environ['PATH'] = f"{sys._MEIPASS};{os.environ['PATH']}"
@@ -26,3 +26,4 @@ requests
 coverage
 xlwt
 build  # pip-only
+rst2html5
@@ -2,3 +2,4 @@ Sphinx>=1.3.1,!=1.7.1
 sphinx-rtd-theme
 sphinx-intl
 sphinx-reredirects
+pyyaml
@@ -4,8 +4,10 @@ import logging
 import os
 import sys
 from gettext import translation
+import warnings

 import babel
+from osgeo import gdal

 LOGGER = logging.getLogger('natcap.invest')
 LOGGER.addHandler(logging.NullHandler())
@@ -28,6 +30,14 @@ LOCALE_NAME_MAP = {
     locale: babel.Locale(locale).display_name for locale in LOCALES
 }

+if not gdal.GetUseExceptions():
+    warnings.warn(('''
+        natcap.invest requires GDAL exceptions to be enabled. You must
+        call gdal.UseExceptions() to avoid unexpected behavior from
+        natcap.invest. A future version will enable exceptions on import.
+        gdal.UseExceptions() affects global state, so this may affect the
+        behavior of other packages.'''), FutureWarning)
+
+
 def set_locale(locale_code):
     """Set the `gettext` attribute of natcap.invest.
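In practice, the new import-time check means calling code should enable GDAL exceptions before importing the package. A minimal sketch of the intended usage:

```python
# Enable GDAL exceptions (a global setting) before importing natcap.invest,
# which avoids the FutureWarning added in the hunk above.
from osgeo import gdal

gdal.UseExceptions()

import natcap.invest  # imports cleanly, no FutureWarning
```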
@@ -12,14 +12,15 @@ import sys
 import textwrap
 import warnings

-import natcap.invest
-from natcap.invest import datastack
-from natcap.invest import model_metadata
-from natcap.invest import set_locale
-from natcap.invest import spec_utils
-from natcap.invest import ui_server
-from natcap.invest import utils
+from pygeoprocessing.geoprocessing_core import GDALUseExceptions
+with GDALUseExceptions():
+    import natcap.invest
+    from natcap.invest import datastack
+    from natcap.invest import model_metadata
+    from natcap.invest import set_locale
+    from natcap.invest import spec_utils
+    from natcap.invest import ui_server
+    from natcap.invest import utils

 DEFAULT_EXIT_CODE = 1
 LOGGER = logging.getLogger(__name__)
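`GDALUseExceptions` comes from pygeoprocessing and scopes GDAL's exception flag to a block. A hypothetical re-implementation, for illustration only (the real one lives in `pygeoprocessing.geoprocessing_core`):

```python
# Illustrative sketch of a GDALUseExceptions-style context manager: enable
# GDAL exceptions for the duration of the block, then restore the previous
# global state on exit.
from contextlib import contextmanager
from osgeo import gdal

@contextmanager
def gdal_use_exceptions():
    previously_enabled = gdal.GetUseExceptions()
    gdal.UseExceptions()
    try:
        yield
    finally:
        if not previously_enabled:
            gdal.DontUseExceptions()
```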
@@ -218,267 +219,268 @@ def main(user_args=None):
     so models may be run in this way without having GUI packages
     installed.
     """
+    with GDALUseExceptions():
         parser = argparse.ArgumentParser(
             description=(
                 'Integrated Valuation of Ecosystem Services and Tradeoffs. '
                 'InVEST (Integrated Valuation of Ecosystem Services and '
                 'Tradeoffs) is a family of tools for quantifying the values of '
                 'natural capital in clear, credible, and practical ways. In '
                 'promising a return (of societal benefits) on investments in '
                 'nature, the scientific community needs to deliver knowledge and '
                 'tools to quantify and forecast this return. InVEST enables '
                 'decision-makers to quantify the importance of natural capital, '
                 'to assess the tradeoffs associated with alternative choices, '
                 'and to integrate conservation and human development. \n\n'
                 'Older versions of InVEST ran as script tools in the ArcGIS '
                 'ArcToolBox environment, but have almost all been ported over to '
                 'a purely open-source python environment.'),
             prog='invest'
         )
         parser.add_argument('--version', action='version',
                             version=natcap.invest.__version__)
         verbosity_group = parser.add_mutually_exclusive_group()
         verbosity_group.add_argument(
             '-v', '--verbose', dest='verbosity', default=0, action='count',
             help=('Increase verbosity. Affects how much logging is printed to '
                   'the console and (if running in headless mode) how much is '
                   'written to the logfile.'))
         verbosity_group.add_argument(
             '--debug', dest='log_level', default=logging.ERROR,
             action='store_const', const=logging.DEBUG,
             help='Enable debug logging. Alias for -vvv')

         parser.add_argument(
             '--taskgraph-log-level', dest='taskgraph_log_level', default='ERROR',
             type=str, choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
             help=('Set the logging level for Taskgraph. Affects how much logging '
                   'Taskgraph prints to the console and (if running in headless '
                   'mode) how much is written to the logfile.'))

         # list the language code and corresponding language name (in that language)
         supported_languages_string = ', '.join([
             f'{locale} ({display_name})'
             for locale, display_name in natcap.invest.LOCALE_NAME_MAP.items()])
         parser.add_argument(
             '-L', '--language', default='en',
             choices=natcap.invest.LOCALES,
             help=('Choose a language. Model specs, names, and validation messages '
                   'will be translated. Log messages are not translated. Value '
                   'should be an ISO 639-1 language code. Supported options are: '
                   f'{supported_languages_string}.'))

         subparsers = parser.add_subparsers(dest='subcommand')

         listmodels_subparser = subparsers.add_parser(
             'list', help='List the available InVEST models')
         listmodels_subparser.add_argument(
             '--json', action='store_true', help='Write output as a JSON object')

         run_subparser = subparsers.add_parser(
             'run', help='Run an InVEST model')
         # Recognize '--headless' for backwards compatibility.
         # This arg is otherwise unused.
         run_subparser.add_argument(
             '-l', '--headless', action='store_true',
             help=argparse.SUPPRESS)
         run_subparser.add_argument(
             '-d', '--datastack', default=None, nargs='?',
             help=('Run the specified model with this JSON datastack. '
                   'Required if using --headless'))
         run_subparser.add_argument(
             '-w', '--workspace', default=None, nargs='?',
             help=('The workspace in which outputs will be saved. '
                   'Required if using --headless'))
         run_subparser.add_argument(
             'model', action=SelectModelAction,  # Assert valid model name
             help=('The model to run. Use "invest list" to list the available '
                   'models.'))

         validate_subparser = subparsers.add_parser(
             'validate', help=(
                 'Validate the parameters of a datastack'))
         validate_subparser.add_argument(
             '--json', action='store_true', help='Write output as a JSON object')
         validate_subparser.add_argument(
             'datastack', help=('Path to a JSON datastack.'))

         getspec_subparser = subparsers.add_parser(
             'getspec', help=('Get the specification of a model.'))
         getspec_subparser.add_argument(
             '--json', action='store_true', help='Write output as a JSON object')
         getspec_subparser.add_argument(
             'model', action=SelectModelAction,  # Assert valid model name
             help=('The model for which the spec should be fetched. Use "invest '
                   'list" to list the available models.'))

         serve_subparser = subparsers.add_parser(
             'serve', help=('Start the flask app on the localhost.'))
         serve_subparser.add_argument(
             '--port', type=int, default=56789,
             help='Port number for the Flask server')

         export_py_subparser = subparsers.add_parser(
             'export-py', help=('Save a python script that executes a model.'))
         export_py_subparser.add_argument(
             'model', action=SelectModelAction,  # Assert valid model name
             help=('The model that the python script will execute. Use "invest '
                   'list" to list the available models.'))
         export_py_subparser.add_argument(
             '-f', '--filepath', default=None,
             help='Define a location for the saved .py file')

         args = parser.parse_args(user_args)
         natcap.invest.set_locale(args.language)

         root_logger = logging.getLogger()
         handler = logging.StreamHandler(sys.stdout)
         formatter = logging.Formatter(
             fmt='%(asctime)s %(name)-18s %(levelname)-8s %(message)s',
             datefmt='%m/%d/%Y %H:%M:%S ')
         handler.setFormatter(formatter)

         # Set the log level based on what the user provides in the available
         # arguments. Verbosity: the more v's the lower the logging threshold.
         # If --debug is used, the logging threshold is 10.
         # If the user goes lower than logging.DEBUG, default to logging.DEBUG.
         log_level = min(args.log_level, logging.ERROR - (args.verbosity*10))
         handler.setLevel(max(log_level, logging.DEBUG))  # don't go below DEBUG
         root_logger.addHandler(handler)
         LOGGER.info('Setting handler log level to %s', log_level)

         # Set the log level for taskgraph.
         taskgraph_log_level = logging.getLevelName(args.taskgraph_log_level.upper())
         logging.getLogger('taskgraph').setLevel(taskgraph_log_level)
         LOGGER.debug('Setting taskgraph log level to %s', taskgraph_log_level)

         # FYI: Root logger by default has a level of logging.WARNING.
         # To capture ALL logging produced in this system at runtime, use this:
         # logging.getLogger().setLevel(logging.DEBUG)
         # Also FYI: using logging.DEBUG means that the logger will defer to
         # the setting of the parent logger.
         logging.getLogger('natcap').setLevel(logging.DEBUG)

         if args.subcommand == 'list':
             # reevaluate the model names in the new language
             importlib.reload(model_metadata)
             if args.json:
                 message = build_model_list_json()
             else:
                 message = build_model_list_table()

             sys.stdout.write(message)
             parser.exit()

         if args.subcommand == 'validate':
             try:
                 parsed_datastack = datastack.extract_parameter_set(args.datastack)
             except Exception as error:
                 parser.exit(
                     1, "Error when parsing JSON datastack:\n    " + str(error))

             # reload validation module first so it's also in the correct language
             importlib.reload(importlib.import_module('natcap.invest.validation'))
             model_module = importlib.reload(importlib.import_module(
                 name=parsed_datastack.model_name))

             try:
                 validation_result = model_module.validate(parsed_datastack.args)
             except KeyError as missing_keys_error:
                 if args.json:
                     message = json.dumps(
                         {'validation_results': {
                             str(list(missing_keys_error.args)): 'Key is missing'}})
                 else:
                     message = ('Datastack is missing keys:\n    ' +
                                str(missing_keys_error.args))

                 # Missing keys have an exit code of 1 because that would indicate
                 # probably programmer error.
                 sys.stdout.write(message)
                 parser.exit(1)
             except Exception as error:
                 parser.exit(
                     1, ('Datastack could not be validated:\n    ' +
                         str(error)))

             # Even validation errors will have an exit code of 0
             if args.json:
                 message = json.dumps({
                     'validation_results': validation_result})
             else:
                 message = pprint.pformat(validation_result)

             sys.stdout.write(message)
             parser.exit(0)

         if args.subcommand == 'getspec':
             target_model = model_metadata.MODEL_METADATA[args.model].pyname
             model_module = importlib.reload(
                 importlib.import_module(name=target_model))
             spec = model_module.MODEL_SPEC

             if args.json:
                 message = spec_utils.serialize_args_spec(spec)
             else:
                 message = pprint.pformat(spec)
             sys.stdout.write(message)
             parser.exit(0)

         if args.subcommand == 'run':
             if args.headless:
                 warnings.warn(
                     '--headless (-l) is now the default (and only) behavior '
                     'for `invest run`. This flag will not be recognized '
                     'in the future.', FutureWarning, stacklevel=2)  # 2 for brevity
             if not args.datastack:
                 parser.exit(1, 'Datastack required for execution.')

             try:
                 parsed_datastack = datastack.extract_parameter_set(args.datastack)
             except Exception as error:
                 parser.exit(
                     1, "Error when parsing JSON datastack:\n    " + str(error))

             if not args.workspace:
                 if ('workspace_dir' not in parsed_datastack.args or
                         parsed_datastack.args['workspace_dir'] in ['', None]):
                     parser.exit(
                         1, ('Workspace must be defined at the command line '
                             'or in the datastack file'))
             else:
                 parsed_datastack.args['workspace_dir'] = args.workspace

             target_model = model_metadata.MODEL_METADATA[args.model].pyname
             model_module = importlib.import_module(name=target_model)
             LOGGER.info('Imported target %s from %s',
                         model_module.__name__, model_module)

             with utils.prepare_workspace(parsed_datastack.args['workspace_dir'],
                                          name=parsed_datastack.model_name,
                                          logging_level=log_level):
                 LOGGER.log(datastack.ARGS_LOG_LEVEL,
                            'Starting model with parameters: \n%s',
                            datastack.format_args_dict(parsed_datastack.args,
                                                       parsed_datastack.model_name))

                 # We're deliberately not validating here because the user
                 # can just call ``invest validate <datastack>`` to validate.
                 #
                 # Exceptions will already be logged to the logfile but will ALSO be
                 # written to stdout if this exception is uncaught. This is by
                 # design.
                 model_module.execute(parsed_datastack.args)

         if args.subcommand == 'serve':
             ui_server.app.run(port=args.port)
             parser.exit(0)

         if args.subcommand == 'export-py':
             target_filepath = args.filepath
             if not args.filepath:
                 target_filepath = f'{args.model}_execute.py'
             export_to_python(target_filepath, args.model)
             parser.exit()


 if __name__ == '__main__':
@@ -1737,15 +1737,16 @@ def extract_bathymetry_along_ray(
     iy = int((point.y - bathy_gt[3]) / bathy_gt[5])
     win_size = 1

-    value = bathy_band.ReadAsArray(
-        xoff=ix, yoff=iy,
-        win_xsize=win_size, win_ysize=win_size)
-    if value is None:
+    try:
+        value = bathy_band.ReadAsArray(
+            xoff=ix, yoff=iy,
+            win_xsize=win_size, win_ysize=win_size)
+    except RuntimeError as ex:
         location = {'xoff': ix, 'yoff': iy, 'win_xsize': win_size,
                     'win_ysize': win_size}
         raise ValueError(
-            f'got a {value} when trying to read bathymetry at {location}. '
-            'Does the bathymetry input fully cover the fetch ray area?')
+            f'Failed to read bathymetry at {location}. Does the bathymetry '
+            'input fully cover the fetch ray area?') from ex
     if bathy_nodata is None or not numpy.isclose(
             value[0][0], bathy_nodata, equal_nan=True):
         bathy_values.append(value)
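The hunk above follows from the switch to `gdal.UseExceptions`: an out-of-bounds read now raises `RuntimeError` instead of returning `None`. A minimal sketch of that behavior, with a hypothetical raster path:

```python
# With GDAL exceptions enabled, ReadAsArray on a window outside the raster
# raises RuntimeError rather than returning None.
from osgeo import gdal

gdal.UseExceptions()
band = gdal.Open('example.tif').GetRasterBand(1)  # hypothetical raster
try:
    block = band.ReadAsArray(
        xoff=10**6, yoff=10**6, win_xsize=1, win_ysize=1)
except RuntimeError as err:
    print(f'read failed: {err}')
```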
@@ -2468,25 +2469,26 @@ def search_for_vector_habitat(

         geometry = feature.GetGeometryRef()
         if not geometry.IsValid():
-            geometry = geometry.Buffer(0)  # sometimes this fixes geometry
-        if geometry is not None:  # geometry is None if the buffer failed.
-            clipped_geometry = geometry.Intersection(base_srs_clipping_geom)
-            if not clipped_geometry.IsEmpty():
-                if target_spatial_reference != base_spatial_reference:
-                    err_code = clipped_geometry.Transform(transform)
-                    if err_code != 0:
-                        LOGGER.warning(
-                            f"Could not transform feature from "
-                            f"{habitat_vector_path} to spatial reference "
-                            "system of AOI")
-                        continue
-                shapely_geom = shapely.wkb.loads(
-                    bytes(clipped_geometry.ExportToWkb()))
-                shapely_geometry_list.extend(_list_geometry(shapely_geom))
-        else:
-            LOGGER.warning(
-                f"FID {feature.GetFID()} in {habitat_vector_path} has invalid "
-                "geometry and will be excluded")
+            try:
+                geometry = geometry.Buffer(0)  # sometimes this fixes geometry
+            except RuntimeError:
+                LOGGER.warning(
+                    f"FID {feature.GetFID()} in {habitat_vector_path} has invalid "
+                    "geometry and will be excluded")
+                continue
+        clipped_geometry = geometry.Intersection(base_srs_clipping_geom)
+        if not clipped_geometry.IsEmpty():
+            if target_spatial_reference != base_spatial_reference:
+                err_code = clipped_geometry.Transform(transform)
+                if err_code != 0:
+                    LOGGER.warning(
+                        f"Could not transform feature from "
+                        f"{habitat_vector_path} to spatial reference "
+                        "system of AOI")
+                    continue
+            shapely_geom = shapely.wkb.loads(
+                bytes(clipped_geometry.ExportToWkb()))
+            shapely_geometry_list.extend(_list_geometry(shapely_geom))

     if not shapely_geometry_list:
         LOGGER.warning(f'No valid features exist in {habitat_vector_path}')
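The same exceptions migration drives this hunk: with exceptions enabled, a failed `Buffer(0)` repair raises `RuntimeError` instead of returning `None`. A sketch of the pattern, using a hypothetical self-intersecting "bow-tie" polygon:

```python
# With OGR exceptions enabled, geometry repair failures surface as
# RuntimeError; the WKT below is an illustrative invalid polygon.
from osgeo import ogr

ogr.UseExceptions()
geometry = ogr.CreateGeometryFromWkt('POLYGON ((0 0, 2 2, 2 0, 0 2, 0 0))')
if not geometry.IsValid():
    try:
        geometry = geometry.Buffer(0)  # sometimes this fixes geometry
    except RuntimeError:
        print('geometry could not be repaired; skipping feature')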
@@ -782,14 +782,17 @@ def _build_spatial_index(
     # put all the polygons in the kd_tree because it's fast and simple
     for poly_feature in model_layer:
         poly_geom = poly_feature.GetGeometryRef()
+        if poly_geom.IsValid():
             poly_centroid = poly_geom.Centroid()
             # put in row/col order since rasters are row/col indexed
             kd_points.append([poly_centroid.GetY(), poly_centroid.GetX()])

             theta_model_parameters.append([
                 poly_feature.GetField(feature_id) for feature_id in
                 ['theta1', 'theta2', 'theta3']])
             method_model_parameter.append(poly_feature.GetField('method'))
+        else:
+            LOGGER.warning(f'skipping invalid geometry {poly_geom}')

     method_model_parameter = numpy.array(
         method_model_parameter, dtype=numpy.int32)
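For context on what `kd_points` feeds: the comment above says the polygons go into a kd-tree. A sketch of that downstream usage, assuming a scipy KD-tree over (y, x) centroid coordinates with made-up values:

```python
# Illustrative only: query the nearest polygon centroid for a point.
import numpy
from scipy.spatial import cKDTree

kd_points = [[45.1, -122.3], [45.2, -122.5], [44.9, -122.1]]  # (y, x)
kd_tree = cKDTree(numpy.array(kd_points))
distance, index = kd_tree.query([45.0, -122.2])
print(f'nearest polygon index: {index}, distance: {distance:.3f}')
```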
@@ -27,7 +27,8 @@ When we are ready to get a new batch of translations, here is the process. These
 pybabel extract \
     --no-wrap \
     --project InVEST \
-    --msgid-bugs-address esoth@stanford.edu \
+    --version $(python -m setuptools_scm) \
+    --msgid-bugs-address natcap-software@lists.stanford.edu \
     --copyright-holder "Natural Capital Project" \
     --output src/natcap/invest/internationalization/messages.pot \
     src/
|
||||||
--input-file src/natcap/invest/internationalization/messages.pot \
|
--input-file src/natcap/invest/internationalization/messages.pot \
|
||||||
--output-file src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po
|
--output-file src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po
|
||||||
```
|
```
|
||||||
|
This looks through the source code for strings wrapped in the `gettext(...)` function and writes them to the message catalog template. Then it updates the message catalog for the specificed language. New strings that don't yet have a translation will have an empty `msgstr` value. Previously translated messages that are no longer needed will be commented out but remain in the file. This will save translator time if they're needed again in the future.
|
||||||
|
|
||||||
2. Check that the changes look correct, then commit:
|
2. Check that the changes look correct, then commit:
|
||||||
```
|
```
|
||||||
|
@ -45,7 +47,6 @@ git diff
|
||||||
git add src/natcap/invest/internationalization/messages.pot src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po
|
git add src/natcap/invest/internationalization/messages.pot src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po
|
||||||
git commit -m "extract message catalog template and update $LL catalog from it"
|
git commit -m "extract message catalog template and update $LL catalog from it"
|
||||||
```
|
```
|
||||||
This looks through the source code for strings wrapped in the `gettext(...)` function and writes them to the message catalog template. Then it updates the message catalog for the specificed language. New strings that don't yet have a translation will have an empty `msgstr` value. Previously translated messages that are no longer needed will be commented out but remain in the file. This will save translator time if they're needed again in the future.
|
|
||||||
|
|
||||||
3. Send `src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po` to the translator and wait to get it back. The translator will fill in the `msgstr` values for any new or edited messages.
|
3. Send `src/natcap/invest/internationalization/locales/$LL/LC_MESSAGES/messages.po` to the translator and wait to get it back. The translator will fill in the `msgstr` values for any new or edited messages.
|
||||||
|
|
||||||
|
|
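At runtime, the compiled catalogs produced by this workflow resolve strings wrapped in `gettext(...)`. A sketch of that runtime side, assuming the repo's locale directory layout and the `messages` domain implied by messages.pot:

```python
# Illustrative: load the Spanish catalog and translate a wrapped string.
from gettext import translation

lang = translation(
    'messages',
    localedir='src/natcap/invest/internationalization/locales',
    languages=['es'],
    fallback=True)
print(lang.gettext('Workspace'))  # translated if the catalog provides it
```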
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -855,6 +855,7 @@ def execute(args):
|
||||||
floral_resources_index_path_map[species],
|
floral_resources_index_path_map[species],
|
||||||
convolve_ps_path],
|
convolve_ps_path],
|
||||||
target_path=pollinator_abundance_path,
|
target_path=pollinator_abundance_path,
|
||||||
|
target_dtype=numpy.float32,
|
||||||
target_nodata=_INDEX_NODATA),
|
target_nodata=_INDEX_NODATA),
|
||||||
dependent_task_list=[
|
dependent_task_list=[
|
||||||
foraged_flowers_index_task_map[(species, season)],
|
foraged_flowers_index_task_map[(species, season)],
|
||||||
|
@ -935,7 +936,9 @@ def execute(args):
|
||||||
rasters=[
|
rasters=[
|
||||||
half_saturation_raster_path,
|
half_saturation_raster_path,
|
||||||
total_pollinator_abundance_index_path],
|
total_pollinator_abundance_index_path],
|
||||||
target_path=farm_pollinator_season_path),
|
target_path=farm_pollinator_season_path,
|
||||||
|
target_dtype=numpy.float32,
|
||||||
|
target_nodata=_INDEX_NODATA),
|
||||||
dependent_task_list=[
|
dependent_task_list=[
|
||||||
half_saturation_task, total_pollinator_abundance_task[season]],
|
half_saturation_task, total_pollinator_abundance_task[season]],
|
||||||
target_path_list=[farm_pollinator_season_path]))
|
target_path_list=[farm_pollinator_season_path]))
|
||||||
|
@ -976,7 +979,9 @@ def execute(args):
|
||||||
kwargs=dict(
|
kwargs=dict(
|
||||||
op=pyt_op,
|
op=pyt_op,
|
||||||
            rasters=[managed_pollinator_path, farm_pollinator_path],
-           target_path=total_pollinator_yield_path),
+           target_path=total_pollinator_yield_path,
+           target_dtype=numpy.float32,
+           target_nodata=_INDEX_NODATA),
        dependent_task_list=[farm_pollinator_task, managed_pollinator_task],
        target_path_list=[total_pollinator_yield_path])

@@ -984,12 +989,14 @@ def execute(args):
    wild_pollinator_yield_path = os.path.join(
        output_dir, _WILD_POLLINATOR_YIELD_FILE_PATTERN % file_suffix)
    wild_pollinator_task = task_graph.add_task(
-       task_name='calcualte_wild_pollinators',
+       task_name='calculate_wild_pollinators',
        func=pygeoprocessing.raster_map,
        kwargs=dict(
            op=pyw_op,
            rasters=[managed_pollinator_path, total_pollinator_yield_path],
-           target_path=wild_pollinator_yield_path),
+           target_path=wild_pollinator_yield_path,
+           target_dtype=numpy.float32,
+           target_nodata=_INDEX_NODATA),
        dependent_task_list=[pyt_task, managed_pollinator_task],
        target_path_list=[wild_pollinator_yield_path])

@@ -1392,7 +1399,8 @@ def _sum_arrays(*array_list):
    result = numpy.empty_like(array_list[0])
    result[:] = 0
    for array in array_list:
-       local_valid_mask = ~pygeoprocessing.array_equals_nodata(array, _INDEX_NODATA)
+       local_valid_mask = ~pygeoprocessing.array_equals_nodata(
+           array, _INDEX_NODATA)
        result[local_valid_mask] += array[local_valid_mask]
        valid_mask |= local_valid_mask
    result[~valid_mask] = _INDEX_NODATA

@@ -1423,6 +1431,7 @@ def _calculate_habitat_nesting_index(
    pygeoprocessing.raster_map(
        op=max_op,
        rasters=substrate_path_list,
+       target_dtype=numpy.float32,
        target_path=target_habitat_nesting_index_path)

@@ -1432,6 +1441,7 @@ def _multiply_by_scalar(raster_path, scalar, target_path):
        op=lambda array: array * scalar,
        rasters=[raster_path],
        target_path=target_path,
+       target_dtype=numpy.float32,
        target_nodata=_INDEX_NODATA,
    )

@@ -1455,6 +1465,7 @@ def _calculate_pollinator_supply_index(
        op=lambda f_r, h_n: species_abundance * f_r * h_n,
        rasters=[habitat_nesting_suitability_path, floral_resources_path],
        target_path=target_path,
+       target_dtype=numpy.float32,
        target_nodata=_INDEX_NODATA
    )
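The hunks above all pin `pygeoprocessing.raster_map` outputs to an explicit dtype and nodata value. A minimal sketch of that calling pattern, assuming the documented `raster_map` keywords and hypothetical input paths:

```python
# Minimal sketch of the raster_map pattern used above; the paths are
# hypothetical and _INDEX_NODATA stands in for the module's nodata constant.
import numpy
import pygeoprocessing

_INDEX_NODATA = -1.0

pygeoprocessing.raster_map(
    op=lambda a, b: a + b,           # pixel-wise op over the stacked arrays
    rasters=['input_a.tif', 'input_b.tif'],
    target_path='summed.tif',
    target_dtype=numpy.float32,      # pin the output dtype explicitly
    target_nodata=_INDEX_NODATA)     # pixels under any input nodata get this
```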
@@ -1603,26 +1603,18 @@ def _validate_same_projection(base_vector_path, table_path):

    invalid_projections = False
    for path in data_paths:
-       def error_handler(err_level, err_no, err_msg):
-           """Empty error handler to avoid stderr output."""
-           pass
-       gdal.PushErrorHandler(error_handler)
-       raster = gdal.OpenEx(path, gdal.OF_RASTER)
-       gdal.PopErrorHandler()
-       if raster is not None:
+       gis_type = pygeoprocessing.get_gis_type(path)
+       if gis_type == pygeoprocessing.UNKNOWN_TYPE:
+           return f"{path} did not load"
+       elif gis_type == pygeoprocessing.RASTER_TYPE:
+           raster = gdal.OpenEx(path, gdal.OF_RASTER)
            projection_as_str = raster.GetProjection()
            ref = osr.SpatialReference()
            ref.ImportFromWkt(projection_as_str)
-           raster = None
        else:
            vector = gdal.OpenEx(path, gdal.OF_VECTOR)
-           if vector is None:
-               return f"{path} did not load"
            layer = vector.GetLayer()
            ref = osr.SpatialReference(layer.GetSpatialRef().ExportToWkt())
-           layer = None
-           vector = None
        if not base_ref.IsSame(ref):
            invalid_projections = True
    if invalid_projections:
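A sketch of the `get_gis_type` dispatch used above, under the assumption that pygeoprocessing's type constants behave as in its public API; the input path is hypothetical:

```python
import pygeoprocessing

some_path = 'data/layer.gpkg'  # hypothetical input
gis_type = pygeoprocessing.get_gis_type(some_path)
if gis_type == pygeoprocessing.UNKNOWN_TYPE:
    print(f'{some_path} is neither raster nor vector')
elif gis_type == pygeoprocessing.RASTER_TYPE:
    print('open with gdal.OF_RASTER')
else:  # pygeoprocessing.VECTOR_TYPE
    print('open with gdal.OF_VECTOR')
```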
@@ -659,33 +659,8 @@ def execute(args):
    output_align_list = [
        file_registry['lulc_aligned_path'], file_registry['dem_aligned_path']]
    if not args['user_defined_local_recharge']:
-       precip_path_list = []
-       et0_path_list = []
-
-       et0_dir_list = [
-           os.path.join(args['et0_dir'], f) for f in os.listdir(
-               args['et0_dir'])]
-       precip_dir_list = [
-           os.path.join(args['precip_dir'], f) for f in os.listdir(
-               args['precip_dir'])]
-
-       for month_index in range(1, N_MONTHS + 1):
-           month_file_match = re.compile(r'.*[^\d]%d\.[^.]+$' % month_index)
-
-           for data_type, dir_list, path_list in [
-                   ('et0', et0_dir_list, et0_path_list),
-                   ('Precip', precip_dir_list, precip_path_list)]:
-               file_list = [
-                   month_file_path for month_file_path in dir_list
-                   if month_file_match.match(month_file_path)]
-               if len(file_list) == 0:
-                   raise ValueError(
-                       "No %s found for month %d" % (data_type, month_index))
-               if len(file_list) > 1:
-                   raise ValueError(
-                       "Ambiguous set of files found for month %d: %s" %
-                       (month_index, file_list))
-               path_list.append(file_list[0])
+       precip_path_list = _get_monthly_file_lists(N_MONTHS, args['precip_dir'])
+       et0_path_list = _get_monthly_file_lists(N_MONTHS, args['et0_dir'])

        input_align_list = (
            precip_path_list + [args['soil_group_path']] + et0_path_list +

@@ -1380,6 +1355,42 @@ def _aggregate_recharge(
    aggregate_vector = None


+def _get_monthly_file_lists(n_months, in_dir):
+    """Create list of monthly files for data type
+
+    Parameters:
+        n_months (int): Number of months to iterate over (should be 12)
+        in_dir (string): Path to directory of monthly files (for specific
+            data type)
+
+    Raises:
+        ValueError: If no file or multiple files are found for a month
+
+    Returns:
+        list: contains monthly file paths for data type
+    """
+    in_path_list = [os.path.join(in_dir, f) for f in os.listdir(in_dir)]
+    out_path_list = []
+
+    for month_index in range(1, n_months + 1):
+        month_file_pattern = re.compile(r'.*[^\d]0?%d\.[^.]+$' % month_index)
+        file_list = [
+            month_file_path for month_file_path in in_path_list
+            if month_file_pattern.match(month_file_path)]
+        if len(file_list) == 0:
+            raise ValueError(
+                "No files found in %s for month %d. Please ensure that \
+                filenames end in the month number (e.g., precip_1.tif)."
+                % (in_dir, month_index))
+        if len(file_list) > 1:
+            raise ValueError(
+                "Ambiguous set of files found for month %d: %s" %
+                (month_index, file_list))
+        out_path_list.append(file_list[0])
+
+    return out_path_list
+
+
 @validation.invest_validator
 def validate(args, limit_to=None):
    """Validate args to ensure they conform to `execute`'s contract.
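The new pattern differs from the old one only by the optional zero (`0?`) before the month number, so both padded and non-padded filenames now match. A standalone check of that regex against hypothetical filenames:

```python
import re

# The pattern _get_monthly_file_lists builds for month 1;
# the filenames below are hypothetical.
month_file_pattern = re.compile(r'.*[^\d]0?%d\.[^.]+$' % 1)

for name in ['precip_1.tif', 'Prcp01.tif', 'et0_11.tif', 'precip_10.tif']:
    print(name, bool(month_file_pattern.match(name)))
# precip_1.tif  True   -- non-padded month
# Prcp01.tif    True   -- zero-padded month
# et0_11.tif    False  -- month 11 is not month 1
# precip_10.tif False  -- trailing 0 belongs to a different month
```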
@@ -2093,11 +2093,10 @@ def _write_supply_demand_vector(source_aoi_vector_path, feature_attrs,
    Returns:
        ``None``
    """
-   source_vector = ogr.Open(source_aoi_vector_path)
-   driver = ogr.GetDriverByName('GPKG')
-   driver.CopyDataSource(source_vector, target_aoi_vector_path)
-   source_vector = None
-   driver = None
+   gdal.VectorTranslate(
+       target_aoi_vector_path, source_aoi_vector_path,
+       format='GPKG',
+       preserveFID=True)

    target_vector = gdal.OpenEx(target_aoi_vector_path, gdal.GA_Update)
    target_layer = target_vector.GetLayer()
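`gdal.VectorTranslate` (the Python binding of `ogr2ogr`) replaces the manual `CopyDataSource` round-trips throughout this commit. A hedged sketch of a plain file-to-file copy; the paths are hypothetical, and `format`/`preserveFID` are the keywords the hunk above relies on:

```python
from osgeo import gdal

gdal.UseExceptions()  # errors raise instead of returning None

# Copy src.gpkg to dst.gpkg, keeping feature IDs stable so later
# per-FID lookups hit the same features as in the source.
gdal.VectorTranslate(
    'dst.gpkg',        # destination path (or an open Dataset)
    'src.gpkg',        # source path (or an open Dataset)
    format='GPKG',
    preserveFID=True)
```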
@@ -50,7 +50,9 @@ def _log_gdal_errors(*args, **kwargs):
    """Log error messages to osgeo.

    All error messages are logged with reasonable ``logging`` levels based
-   on the GDAL error level.
+   on the GDAL error level. While we are now using ``gdal.UseExceptions()``,
+   we still need this to handle GDAL logging that does not get raised as
+   an exception.

    Note:
        This function is designed to accept any number of positional and

@@ -304,19 +304,16 @@ def check_raster(filepath, projected=False, projection_units=None, **kwargs):
    if file_warning:
        return file_warning

-   gdal.PushErrorHandler('CPLQuietErrorHandler')
-   gdal_dataset = gdal.OpenEx(filepath, gdal.OF_RASTER)
-   gdal.PopErrorHandler()
-
-   if gdal_dataset is None:
+   try:
+       gdal_dataset = gdal.OpenEx(filepath, gdal.OF_RASTER)
+   except RuntimeError:
        return MESSAGES['NOT_GDAL_RASTER']

    # Check that an overview .ovr file wasn't opened.
    if os.path.splitext(filepath)[1] == '.ovr':
        return MESSAGES['OVR_FILE']

-   srs = osr.SpatialReference()
-   srs.ImportFromWkt(gdal_dataset.GetProjection())
+   srs = gdal_dataset.GetSpatialRef()

    projection_warning = _check_projection(srs, projected, projection_units)
    if projection_warning:
        gdal_dataset = None

@@ -378,9 +375,10 @@ def check_vector(filepath, geometries, fields=None, projected=False,
    if file_warning:
        return file_warning

-   gdal.PushErrorHandler('CPLQuietErrorHandler')
-   gdal_dataset = gdal.OpenEx(filepath, gdal.OF_VECTOR)
-   gdal.PopErrorHandler()
+   try:
+       gdal_dataset = gdal.OpenEx(filepath, gdal.OF_VECTOR)
+   except RuntimeError:
+       return MESSAGES['NOT_GDAL_VECTOR']

    geom_map = {
        'POINT': [ogr.wkbPoint, ogr.wkbPointM, ogr.wkbPointZM,

@@ -402,9 +400,6 @@ def check_vector(filepath, geometries, fields=None, projected=False,
    for geom in geometries:
        allowed_geom_types += geom_map[geom]

-   if gdal_dataset is None:
-       return MESSAGES['NOT_GDAL_VECTOR']
-
    # NOTE: this only checks the layer geometry type, not the types of the
    # actual geometries (layer.GetGeometryTypes()). This is probably equivalent
    # in most cases, and it's more efficient than checking every geometry, but
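With exceptions enabled, a failed `gdal.OpenEx` raises `RuntimeError` instead of returning `None`, which is what lets the quiet-error-handler push/pop disappear. A minimal sketch of the new control flow, with a hypothetical path:

```python
from osgeo import gdal

gdal.UseExceptions()

def open_raster_or_message(filepath):
    # With exceptions enabled, a bad open raises RuntimeError rather than
    # returning None, so no CPLQuietErrorHandler is needed.
    try:
        dataset = gdal.OpenEx(filepath, gdal.OF_RASTER)
    except RuntimeError:
        return 'File could not be opened as a GDAL raster'
    return dataset

print(open_raster_or_message('no_such_file.tif'))
```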
@@ -1148,23 +1148,18 @@ def _copy_vector_or_raster(base_file_path, target_file_path):
        ValueError if the base file can't be opened by GDAL.

    """
-   # Open the file as raster first
-   source_dataset = gdal.OpenEx(base_file_path, gdal.OF_RASTER)
-   target_driver_name = _RASTER_DRIVER_NAME
-   if source_dataset is None:
-       # File didn't open as a raster; assume it's a vector
+   gis_type = pygeoprocessing.get_gis_type(base_file_path)
+   if gis_type == pygeoprocessing.RASTER_TYPE:
+       source_dataset = gdal.OpenEx(base_file_path, gdal.OF_RASTER)
+       target_driver_name = _RASTER_DRIVER_NAME
+   elif gis_type == pygeoprocessing.VECTOR_TYPE:
        source_dataset = gdal.OpenEx(base_file_path, gdal.OF_VECTOR)
        target_driver_name = _VECTOR_DRIVER_NAME
-
-   # Raise an exception if the file can't be opened by GDAL
-   if source_dataset is None:
-       raise ValueError(
-           'File %s is neither a GDAL-compatible raster nor vector.'
-           % base_file_path)
+   else:
+       raise ValueError(f'File {base_file_path} is neither a GDAL-compatible '
+                        'raster nor vector.')

    driver = gdal.GetDriverByName(target_driver_name)
    driver.CreateCopy(target_file_path, source_dataset)
-   source_dataset = None


 def _interpolate_vector_field_onto_raster(

@@ -1758,10 +1758,9 @@ def _calculate_land_to_grid_distance(
    # Copy the point vector
    _, driver_name = _get_file_ext_and_driver_name(
        target_land_vector_path)
-   base_land_vector = ogr.Open(base_land_vector_path, gdal.OF_VECTOR)
-   driver = ogr.GetDriverByName(driver_name)
-   driver.CopyDataSource(base_land_vector, target_land_vector_path)
-   base_land_vector = None
+   gdal.VectorTranslate(
+       target_land_vector_path, base_land_vector_path,
+       format=driver_name)

    target_land_vector = gdal.OpenEx(
        target_land_vector_path, gdal.OF_VECTOR | gdal.GA_Update)

@@ -1846,7 +1845,7 @@ def _mask_by_distance(base_raster_path, min_dist, max_dist, out_nodata,


 def _create_distance_raster(base_raster_path, base_vector_path,
-                            target_dist_raster_path, work_dir):
+                            target_dist_raster_path, work_dir, where_clause=None):
    """Create and rasterize vector onto a raster, and calculate dist transform.

    Create a raster where the pixel values represent the euclidean distance to

@@ -1858,6 +1857,9 @@ def _create_distance_raster(base_raster_path, base_vector_path,
        base_vector_path (str): path to vector to be rasterized.
        target_dist_raster_path (str): path to raster with distance transform.
        work_dir (str): path to create a temp folder for saving files.
+       where_clause (str): If not None, is an SQL query-like string to filter
+           which features are rasterized. This kwarg is passed to
+           ``pygeoprocessing.rasterize``.

    Returns:
        None

@@ -1885,7 +1887,8 @@ def _create_distance_raster(base_raster_path, base_vector_path,
        base_vector_path,
        rasterized_raster_path,
        burn_values=[1],
-       option_list=["ALL_TOUCHED=TRUE"])
+       option_list=["ALL_TOUCHED=TRUE"],
+       where_clause=where_clause)

    # Calculate euclidean distance transform
    pygeoprocessing.distance_transform_edt(
|
||||||
# A list to hold the land to grid distances in order for each point
|
# A list to hold the land to grid distances in order for each point
|
||||||
# features 'L2G' field
|
# features 'L2G' field
|
||||||
l2g_dist = []
|
l2g_dist = []
|
||||||
# A list to hold the individual distance transform path's in order
|
# A list to hold the individual distance transform paths in order
|
||||||
land_point_dist_raster_path_list = []
|
land_point_dist_raster_path_list = []
|
||||||
|
|
||||||
# Get the original layer definition which holds needed attribute values
|
fid_field = base_point_layer.GetFIDColumn()
|
||||||
base_layer_defn = base_point_layer.GetLayerDefn()
|
if not fid_field:
|
||||||
file_ext, driver_name = _get_file_ext_and_driver_name(
|
fid_field = 'FID'
|
||||||
base_point_vector_path)
|
|
||||||
output_driver = ogr.GetDriverByName(driver_name)
|
|
||||||
single_feature_vector_path = os.path.join(
|
|
||||||
temp_dir, 'single_feature' + file_ext)
|
|
||||||
target_vector = output_driver.CreateDataSource(single_feature_vector_path)
|
|
||||||
|
|
||||||
# Create the new layer for target_vector using same name and
|
|
||||||
# geometry type from base_vector as well as spatial reference
|
|
||||||
target_layer = target_vector.CreateLayer(base_layer_defn.GetName(),
|
|
||||||
base_point_layer.GetSpatialRef(),
|
|
||||||
base_layer_defn.GetGeomType())
|
|
||||||
|
|
||||||
# Get the number of fields in original_layer
|
|
||||||
base_field_count = base_layer_defn.GetFieldCount()
|
|
||||||
|
|
||||||
# For every field, create a duplicate field and add it to the new
|
|
||||||
# shapefiles layer
|
|
||||||
for fld_index in range(base_field_count):
|
|
||||||
base_field = base_layer_defn.GetFieldDefn(fld_index)
|
|
||||||
target_field = ogr.FieldDefn(base_field.GetName(),
|
|
||||||
base_field.GetType())
|
|
||||||
# NOT setting the WIDTH or PRECISION because that seems to be
|
|
||||||
# unneeded and causes interesting OGR conflicts
|
|
||||||
target_layer.CreateField(target_field)
|
|
||||||
|
|
||||||
# Create a new shapefile with only one feature to burn onto a raster
|
# Create a new shapefile with only one feature to burn onto a raster
|
||||||
# in order to get the distance transform based on that one feature
|
# in order to get the distance transform based on that one feature
|
||||||
for feature_index, point_feature in enumerate(base_point_layer):
|
for feature_index, point_feature in enumerate(base_point_layer):
|
||||||
# Get the point features land to grid value and add it to the list
|
# Get the point features land to grid value and add it to the list
|
||||||
field_index = point_feature.GetFieldIndex('L2G')
|
l2g_dist.append(float(point_feature.GetField('L2G')))
|
||||||
l2g_dist.append(float(point_feature.GetField(field_index)))
|
|
||||||
|
|
||||||
# Copy original_datasource's feature and set as new shapes feature
|
dist_raster_path = os.path.join(temp_dir, f'dist_{feature_index}.tif')
|
||||||
output_feature = ogr.Feature(feature_def=target_layer.GetLayerDefn())
|
_create_distance_raster(
|
||||||
|
base_raster_path, base_point_vector_path, dist_raster_path,
|
||||||
# Since the original feature is of interest add its fields and
|
work_dir, where_clause=f'{fid_field}={point_feature.GetFID()}')
|
||||||
# Values to the new feature from the intersecting geometries
|
|
||||||
# The False in SetFrom() signifies that the fields must match
|
|
||||||
# exactly
|
|
||||||
output_feature.SetFrom(point_feature, False)
|
|
||||||
target_layer.CreateFeature(output_feature)
|
|
||||||
target_vector.SyncToDisk()
|
|
||||||
target_layer.DeleteFeature(point_feature.GetFID())
|
|
||||||
|
|
||||||
dist_raster_path = os.path.join(temp_dir,
|
|
||||||
'dist_%s.tif' % feature_index)
|
|
||||||
_create_distance_raster(base_raster_path, single_feature_vector_path,
|
|
||||||
dist_raster_path, work_dir)
|
|
||||||
# Add each features distance transform result to list
|
# Add each features distance transform result to list
|
||||||
land_point_dist_raster_path_list.append(dist_raster_path)
|
land_point_dist_raster_path_list.append(dist_raster_path)
|
||||||
|
|
||||||
target_layer = None
|
|
||||||
target_vector = None
|
|
||||||
base_point_layer = None
|
|
||||||
base_point_vector = None
|
|
||||||
l2g_dist_array = numpy.array(l2g_dist)
|
l2g_dist_array = numpy.array(l2g_dist)
|
||||||
|
|
||||||
def _min_land_ocean_dist(*grid_distances):
|
def _min_land_ocean_dist(*grid_distances):
|
||||||
|
|
|
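Rather than copying each point into a scratch one-feature vector, the loop now filters the source vector with a per-feature `where_clause` on its FID column. A condensed sketch of that idea with a hypothetical vector path; the distance-raster call is stubbed out in a comment:

```python
from osgeo import gdal

gdal.UseExceptions()

vector = gdal.OpenEx('points.gpkg', gdal.OF_VECTOR)  # hypothetical path
layer = vector.GetLayer()

# GPKG layers usually report a real FID column name; fall back to the
# virtual 'FID' name OGR SQL understands when none is reported.
fid_field = layer.GetFIDColumn() or 'FID'

for feature in layer:
    where_clause = f'{fid_field}={feature.GetFID()}'
    # Each iteration would rasterize exactly one feature, e.g.:
    # _create_distance_raster(raster_path, 'points.gpkg', out_path,
    #                         work_dir, where_clause=where_clause)
    print(where_clause)
```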
@@ -6,13 +6,14 @@ import os

 import pandas
 import numpy
+from osgeo import gdal
 import pygeoprocessing


 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'annual_water_yield')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')
+gdal.UseExceptions()

 class AnnualWaterYieldTests(unittest.TestCase):
    """Regression Tests for Annual Water Yield Model."""

@@ -12,6 +12,7 @@ import numpy.random
 import numpy.testing
 import pygeoprocessing

+gdal.UseExceptions()

 def make_simple_raster(base_raster_path, fill_val, nodata_val):
    """Create a 10x10 raster on designated path with fill value.

@@ -10,12 +10,13 @@ import json
 import importlib
 import uuid

 try:
    from StringIO import StringIO
 except ImportError:
    from io import StringIO

+from osgeo import gdal
+gdal.UseExceptions()

 @contextlib.contextmanager
 def redirect_stdout():

@@ -17,6 +17,7 @@ from natcap.invest import validation
 from osgeo import gdal
 from osgeo import osr

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'coastal_blue_carbon')

@@ -20,6 +20,7 @@ from shapely.geometry import MultiPolygon
 from shapely.geometry import Point
 from shapely.geometry import Polygon

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'coastal_vulnerability')

@@ -931,14 +932,8 @@ class CoastalVulnerabilityTests(unittest.TestCase):
            REGRESSION_DATA, 'coastal_exposure.gpkg')

        # This input gets modified in place, so first copy to working dir
-       # I'm using GPKG driver to copy because that driver may have problems
-       # updating a file created by a different GPKG driver version, and the
-       # version used is dependent on GDAL version.
-       # https://gdal.org/drivers/vector/gpkg.html
-       base_shore_point_vector = ogr.Open(base_vector_path)
-       gpkg_driver = ogr.GetDriverByName('GPKG')
-       gpkg_driver.CopyDataSource(
-           base_shore_point_vector, target_point_vector_path)
+       base_shore_point_vector = gdal.OpenEx(base_vector_path, gdal.OF_VECTOR)
+       gdal.VectorTranslate(target_point_vector_path, base_shore_point_vector)

        coastal_vulnerability.calculate_final_risk(
            target_point_vector_path, target_point_csv_path)

@@ -968,17 +963,9 @@ class CoastalVulnerabilityTests(unittest.TestCase):
        # This gpkg has a feature with an empty field value for 'R_slr'
        # The function modifies the file in place, so copy to test workspace
        # first.
-
-       # I'm using GPKG driver to copy because that driver may have problems
-       # updating a file created by a different GPKG driver version, and the
-       # version used is dependent on GDAL version.
-       # https://gdal.org/drivers/vector/gpkg.html
        base_vector_path = os.path.join(
            REGRESSION_DATA, 'test_missing_values.gpkg')
-       base_shore_point_vector = ogr.Open(base_vector_path)
-       gpkg_driver = ogr.GetDriverByName('GPKG')
-       gpkg_driver.CopyDataSource(
-           base_shore_point_vector, target_vector_path)
+       gdal.VectorTranslate(target_vector_path, base_vector_path)

        coastal_vulnerability.calculate_final_risk(
            target_vector_path, target_csv_path)

@@ -9,6 +9,7 @@ from osgeo import gdal
 import pandas
 import pygeoprocessing

+gdal.UseExceptions()
 MODEL_DATA_PATH = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'crop_production_model', 'model_data')

@@ -18,6 +18,7 @@ import shapely.geometry
 from osgeo import gdal
 from osgeo import ogr

+gdal.UseExceptions()
 _TEST_FILE_CWD = os.path.dirname(os.path.abspath(__file__))
 DATA_DIR = os.path.join(_TEST_FILE_CWD,
                         '..', 'data', 'invest-test-data', 'data_stack')

@@ -19,6 +19,7 @@ from shapely.geometry import box
 from shapely.geometry import MultiPoint
 from shapely.geometry import Point

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'delineateit')

@@ -7,7 +7,7 @@ import os
 from osgeo import gdal
 import numpy

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'forest_carbon_edge_effect')

@@ -12,6 +12,7 @@ from osgeo import ogr
 from osgeo import osr
 from shapely.geometry import Polygon

+gdal.UseExceptions()

 def make_raster_from_array(
        base_array, base_raster_path, nodata_val=-1, gdal_type=gdal.GDT_Int32):

@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()
 ORIGIN = (1180000.0, 690000.0)
 _SRS = osr.SpatialReference()
 _SRS.ImportFromEPSG(26910)  # UTM zone 10N

@@ -3,6 +3,8 @@
 import unittest
 import os

+from osgeo import gdal
+gdal.UseExceptions()

 class FileRegistryTests(unittest.TestCase):
    """Tests for the InVEST file registry builder."""

@@ -4,7 +4,9 @@ import unittest

 import pint
 from natcap.invest.model_metadata import MODEL_METADATA
+from osgeo import gdal

+gdal.UseExceptions()
 valid_nested_types = {
    None: {  # if no parent type (arg is top-level), then all types are valid
        'boolean',

@@ -12,6 +12,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'ndr')

@@ -7,9 +7,11 @@ import unittest
 import numpy
 import pygeoprocessing
 import shapely.geometry
+from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'pollination')

@@ -28,6 +28,7 @@ import warnings

 from natcap.invest import utils

+gdal.UseExceptions()
 Pyro4.config.SERIALIZER = 'marshal'  # allow null bytes in strings

 REGRESSION_DATA = os.path.join(

@@ -9,6 +9,7 @@ import numpy
 from osgeo import gdal
 from osgeo import osr

+gdal.UseExceptions()

 class RouteDEMTests(unittest.TestCase):
    """Tests for RouteDEM with Pygeoprocessing 1.x routing API."""

@@ -5,7 +5,9 @@ import shutil
 import os

 import pandas
+from osgeo import gdal

+gdal.UseExceptions()
 TEST_DATA_DIR = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'scenario_gen_proximity')

@@ -14,6 +14,7 @@ from shapely.geometry import LineString
 from shapely.geometry import Point
 from shapely.geometry import Polygon

+gdal.UseExceptions()
 _SRS = osr.SpatialReference()
 _SRS.ImportFromEPSG(32731)  # WGS84 / UTM zone 31s
 WKT = _SRS.ExportToWkt()

@@ -9,6 +9,7 @@ import pygeoprocessing
 from osgeo import gdal
 from osgeo import osr

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'sdr')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')

@@ -10,7 +10,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data',
    'seasonal_water_yield')
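Every test module in this commit gains the same preamble so that GDAL raises Python exceptions for the whole test session. The idiom, for reference:

```python
# Enable GDAL exceptions once, at import time, before any dataset is opened.
from osgeo import gdal

gdal.UseExceptions()
```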
@@ -165,6 +166,30 @@ def make_precip_rasters(precip_dir_path):
        make_raster_from_array(precip_array, precip_raster_path)


+def make_zeropadded_rasters(dir_path, prefix):
+    """Make twelve 1x1 raster files with filenames ending in zero-padded
+    month number.
+
+    Args:
+        dir_path (str): path to the directory for saving the rasters.
+        prefix (str): prefix of new files to create.
+
+    Returns:
+        list: monthly raster filenames
+    """
+    size = 1
+    monthly_raster_list = []
+
+    for month in range(1, 13):
+        raster_path = os.path.join(
+            dir_path, prefix + str(month).zfill(2) + '.tif')
+        temp_array = numpy.full((size, size), 1, dtype=numpy.int8)
+        make_raster_from_array(temp_array, raster_path)
+        monthly_raster_list.append(raster_path)
+
+    return monthly_raster_list
+
+
 def make_recharge_raster(recharge_ras_path):
    """Make a 100x100 raster of user defined recharge.
@@ -317,6 +342,61 @@ class SeasonalWaterYieldUnusualDataTests(unittest.TestCase):
        """Delete workspace after test is done."""
        shutil.rmtree(self.workspace_dir, ignore_errors=True)

+    def test_zeropadded_monthly_filenames(self):
+        """test filenames with zero-padded months in
+        _get_monthly_file_lists function
+        """
+        from natcap.invest.seasonal_water_yield.seasonal_water_yield import _get_monthly_file_lists
+
+        n_months = 12
+
+        # Make directory and file names with zero-padded months
+        test_precip_dir_path = os.path.join(self.workspace_dir,
+                                            'test_0pad_precip_dir')
+        os.makedirs(test_precip_dir_path)
+        precip_file_list = make_zeropadded_rasters(test_precip_dir_path, 'Prcp')
+
+        test_eto_dir_path = os.path.join(self.workspace_dir,
+                                         'test_0pad_eto_dir')
+        os.makedirs(test_eto_dir_path)
+        eto_file_list = make_zeropadded_rasters(test_eto_dir_path, 'et0_')
+
+        # Create list of monthly files for data_type
+        eto_path_list = _get_monthly_file_lists(
+            n_months, test_eto_dir_path)
+
+        precip_path_list = _get_monthly_file_lists(
+            n_months, test_precip_dir_path)
+
+        # Verify that the returned lists match the input
+        self.assertEqual(precip_path_list, precip_file_list)
+        self.assertEqual(eto_path_list, eto_file_list)
+
+    def test_nonpadded_monthly_filenames(self):
+        """test filenames without zero-padded months in
+        _get_monthly_file_lists function
+        """
+        from natcap.invest.seasonal_water_yield.seasonal_water_yield import _get_monthly_file_lists
+
+        n_months = 12
+
+        # Make directory and file names with (non-zero-padded) months
+        precip_dir_path = os.path.join(self.workspace_dir, 'precip_dir')
+        os.makedirs(precip_dir_path)
+        make_precip_rasters(precip_dir_path)
+
+        precip_path_list = _get_monthly_file_lists(
+            n_months, precip_dir_path)
+
+        # Create lists of monthly filenames to which to compare function output
+        # Note this is hardcoded to match the filenames created in make_precip_rasters
+        match_precip = [os.path.join(precip_dir_path,
+                                     "precip_mm_" + str(m) + ".tif")
+                        for m in range(1, n_months + 1)]
+
+        # Verify that the returned lists match the input
+        self.assertEqual(precip_path_list, match_precip)
+
    def test_ambiguous_precip_data(self):
        """SWY test case where there are more than 12 precipitation files."""
        from natcap.invest.seasonal_water_yield import seasonal_water_yield
@@ -2,7 +2,9 @@ import unittest

 from natcap.invest import spec_utils
 from natcap.invest.unit_registry import u
+from osgeo import gdal

+gdal.UseExceptions()

 class TestSpecUtils(unittest.TestCase):

@@ -13,7 +13,7 @@ import pygeoprocessing
 from pygeoprocessing.geoprocessing_core import (
    DEFAULT_GTIFF_CREATION_TUPLE_OPTIONS as opts_tuple)

+gdal.UseExceptions()
 TEST_DATA = os.path.join(os.path.dirname(
    __file__), '..', 'data', 'invest-test-data', 'stormwater')

@@ -10,7 +10,9 @@ from unittest.mock import patch
 from babel.messages import Catalog, mofile
 import natcap.invest
 from natcap.invest import validation
+from osgeo import gdal

+gdal.UseExceptions()
 TEST_LANG = 'll'

 # assign to local variable so that it won't be changed by translation

@@ -8,6 +8,7 @@ import numpy
 import pandas
 from osgeo import gdal

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'ucm')

@@ -13,6 +13,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()

 class UFRMTests(unittest.TestCase):
    """Tests for the Urban Flood Risk Mitigation Model."""

@@ -6,7 +6,9 @@ import unittest
 from unittest.mock import Mock, patch

 from natcap.invest import ui_server
+from osgeo import gdal

+gdal.UseExceptions()
 TEST_DATA_PATH = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data')

@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()
 _DEFAULT_ORIGIN = (444720, 3751320)
 _DEFAULT_PIXEL_SIZE = (30, -30)
 _DEFAULT_EPSG = 3116

@@ -904,8 +905,14 @@ class UNATests(unittest.TestCase):
        from natcap.invest import urban_nature_access
        args = _build_model_args(self.workspace_dir)

+       admin_vector = gdal.OpenEx(args['admin_boundaries_vector_path'])
+       admin_layer = admin_vector.GetLayer()
+       fid = admin_layer.GetNextFeature().GetFID()
+       admin_layer = None
+       admin_vector = None
+
        feature_attrs = {
-           0: {
+           fid: {
                'my-field-1': float(1.2345),
                'my-field-2': numpy.float32(2.34567),
                'my-field-3': numpy.float64(3.45678),

@@ -924,10 +931,10 @@ class UNATests(unittest.TestCase):
            vector = gdal.OpenEx(target_vector_path)
            self.assertEqual(vector.GetLayerCount(), 1)
            layer = vector.GetLayer()
-           self.assertEqual(len(layer.schema), len(feature_attrs[0]))
+           self.assertEqual(len(layer.schema), len(feature_attrs[fid]))
            self.assertEqual(layer.GetFeatureCount(), 1)
-           feature = layer.GetFeature(0)
-           for field_name, expected_field_value in feature_attrs[0].items():
+           feature = layer.GetFeature(fid)
+           for field_name, expected_field_value in feature_attrs[fid].items():
                self.assertEqual(
                    feature.GetField(field_name), expected_field_value)
        finally:
@@ -13,6 +13,7 @@ import shapely.geometry
 import numpy
 import numpy.testing

+gdal.UseExceptions()

 class UsageLoggingTests(unittest.TestCase):
    """Tests for the InVEST usage logging framework."""

@@ -25,6 +25,7 @@ from osgeo import osr
 from shapely.geometry import Point
 from shapely.geometry import Polygon

+gdal.UseExceptions()

 class SuffixUtilsTests(unittest.TestCase):
    """Tests for natcap.invest.utils.make_suffix_string."""

@@ -412,17 +413,22 @@ class GDALWarningsLoggingTests(unittest.TestCase):

        logfile = os.path.join(self.workspace, 'logfile.txt')

-       # this warning should go to stdout.
-       gdal.Open('this_file_should_not_exist.tif')
+       invalid_polygon = ogr.CreateGeometryFromWkt(
+           'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')
+
+       # This produces a GDAL warning that does not raise an
+       # exception with UseExceptions(). Without capture_gdal_logging,
+       # it will be printed directly to stderr
+       invalid_polygon.IsValid()

        with utils.log_to_file(logfile) as handler:
            with utils.capture_gdal_logging():
                # warning should be captured.
-               gdal.Open('file_file_should_also_not_exist.tif')
+               invalid_polygon.IsValid()
            handler.flush()

-       # warning should go to stdout
-       gdal.Open('this_file_should_not_exist.tif')
+       # warning should go to stderr
+       invalid_polygon.IsValid()

        with open(logfile) as opened_logfile:
            messages = [msg for msg in opened_logfile.read().split('\n')

@@ -499,7 +505,11 @@ class PrepareWorkspaceTests(unittest.TestCase):
            with utils.prepare_workspace(workspace,
                                         'some_model'):
                warnings.warn('deprecated', UserWarning)
-               gdal.Open('file should not exist')
+               invalid_polygon = ogr.CreateGeometryFromWkt(
+                   'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')
+               # This produces a GDAL warning that does not raise an
+               # exception with UseExceptions()
+               invalid_polygon.IsValid()

        self.assertTrue(os.path.exists(workspace))
        logfile_glob = glob.glob(os.path.join(workspace, '*.txt'))

@@ -509,11 +519,9 @@ class PrepareWorkspaceTests(unittest.TestCase):
        with open(logfile_glob[0]) as logfile:
            logfile_text = logfile.read()
            # all the following strings should be in the logfile.
-           expected_string = (
-               'file should not exist: No such file or directory')
-           self.assertTrue(
-               expected_string in logfile_text)  # gdal error captured
-           self.assertEqual(len(re.findall('WARNING', logfile_text)), 1)
+           self.assertTrue(  # gdal logging captured
+               'Self-intersection at or near point -18 -18' in logfile_text)
+           self.assertEqual(len(re.findall('WARNING', logfile_text)), 2)
            self.assertTrue('Elapsed time:' in logfile_text)

@@ -18,6 +18,7 @@ from osgeo import gdal
 from osgeo import ogr
 from osgeo import osr

+gdal.UseExceptions()

 class SpatialOverlapTest(unittest.TestCase):
    """Test Spatial Overlap."""

@@ -18,6 +18,7 @@ from shapely.geometry import Point
 from natcap.invest import utils
 import pygeoprocessing

+gdal.UseExceptions()
 REGRESSION_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'wave_energy')
 SAMPLE_DATA = os.path.join(REGRESSION_DATA, 'input')

@@ -17,6 +17,7 @@ from osgeo import osr

 import pygeoprocessing

+gdal.UseExceptions()
 SAMPLE_DATA = os.path.join(
    os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'wind_energy',
    'input')
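The logging tests above need a GDAL message that is logged but not raised, so they swap `gdal.Open` on a missing file (now a `RuntimeError`) for validation of a self-intersecting bowtie polygon. A standalone sketch, assuming a GEOS-enabled GDAL build:

```python
from osgeo import gdal, ogr

gdal.UseExceptions()

# A bowtie polygon: the ring crosses itself near (-18, -18).
invalid_polygon = ogr.CreateGeometryFromWkt(
    'POLYGON ((-20 -20, -16 -20, -20 -16, -16 -16, -20 -20))')

# IsValid() emits a GDAL/GEOS warning ("Self-intersection at or near
# point -18 -18") but returns False instead of raising, even with
# UseExceptions() enabled.
print(invalid_polygon.IsValid())  # False
```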
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 {
   "name": "invest-workbench",
-  "version": "0.1.0",
+  "version": "3.14.2",
   "description": "Models that map and value the goods and services from nature that sustain and fulfill human life",
   "main": "build/main/main.js",
   "homepage": "./",
@@ -121,18 +121,24 @@ These instructions assume you have defined the two-letter locale code in an environment variable:
    git add src/main/i18n/$LL.json src/renderer/i18n/$LL.json
    git commit -m "add new messages into $LL translation files"
    ```
-3. (if the translator uses PO format) Convert JSON to PO
+3. Convert JSON to PO using [i18next-gettext-converter](https://github.com/i18next/i18next-gettext-converter):
+   ```
+   i18next-conv -l $LL -s src/main/i18n/$LL.json -t src/main/i18n/$LL.po
+   i18next-conv -l $LL -s src/renderer/i18n/$LL.json -t src/renderer/i18n/$LL.po
+   ```
-4. Send `src/main/i18n/$LL.[json,po]` and `src/renderer/i18n/$LL.[json,po]` to the translator and wait to receive a copy with translations added.
-5. (if the translator uses PO format) Convert PO to JSON
-   If the translator works with PO files, we can convert them to JSON using this tool: https://github.com/i18next/i18next-gettext-converter
-6. Replace `src/main/i18n/$LL.[json,po]` and `src/renderer/i18n/$LL.json` with the updated versions received from the translator
-7. Commit the changes:
+4. Send `src/main/i18n/$LL.po` and `src/renderer/i18n/$LL.po` to the translator and wait to receive a copy with translations added.
+5. Receive the updated PO files from the translator, then convert PO to JSON using [i18next-gettext-converter](https://github.com/i18next/i18next-gettext-converter). Replace `src/main/i18n/$LL.json` and `src/renderer/i18n/$LL.json` with the updated versions:
+   ```
+   i18next-conv -l $LL -s new_main_translations.po -t src/main/i18n/$LL.json
+   i18next-conv -l $LL -s new_renderer_translations.po -t src/renderer/i18n/$LL.json
+   ```
+6. Commit the changes:
    ```
    git add src/main/i18n/$LL.json src/renderer/i18n/$LL.json
    git commit -m "add new translations for $LL"
    ```
@@ -1,10 +1,10 @@
 {
-  "File": "",
-  "Edit": "",
-  "Speech": "",
-  "View": "",
-  "Window": "",
-  "About": "",
-  "About InVEST": "",
-  "Report a problem": ""
+  "File": "Archivo",
+  "Edit": "Edición",
+  "Speech": "Dicción",
+  "View": "Vista",
+  "Window": "Ventana",
+  "About": "Acerca de",
+  "About InVEST": "Acerca de InVEST",
+  "Report a problem": "Informe de un problema"
 }

@@ -1,10 +1,10 @@
 {
   "File": "文件",
   "Edit": "编辑",
   "Speech": "语音输入",
   "View": "视图",
   "Window": "界面",
   "About": "关于",
   "About InVEST": "关于 InVEST",
   "Report a problem": "报告一个问题"
 }
@@ -1,4 +1,5 @@
 export const ipcMainChannels = {
+  BASE_URL: 'base-url',
   CHANGE_LANGUAGE: 'change-language',
   CHECK_FILE_PERMISSIONS: 'check-file-permissions',
   CHECK_STORAGE_TOKEN: 'check-storage-token',

@@ -12,6 +13,7 @@ export const ipcMainChannels = {
   INVEST_RUN: 'invest-run',
   INVEST_VERSION: 'invest-version',
   IS_FIRST_RUN: 'is-first-run',
+  IS_NEW_VERSION: 'is-new-version',
   LOGGER: 'logger',
   OPEN_EXTERNAL_URL: 'open-external-url',
   OPEN_PATH: 'open-path',

@@ -1,5 +1,5 @@
 import path from 'path';
+import i18n from './i18n/i18n';
 // eslint-disable-next-line import/no-extraneous-dependencies
 import {
   app,
@@ -9,35 +9,37 @@ import {
   ipcMain
 } from 'electron';

+import BASE_URL from './baseUrl';
 import {
   createPythonFlaskProcess,
   getFlaskIsReady,
-  shutdownPythonProcess
+  shutdownPythonProcess,
 } from './createPythonFlaskProcess';
 import findInvestBinaries from './findInvestBinaries';
-import setupDownloadHandlers from './setupDownloadHandlers';
-import setupDialogs from './setupDialogs';
-import setupContextMenu from './setupContextMenu';
+import { ipcMainChannels } from './ipcMainChannels';
+import ELECTRON_DEV_MODE from './isDevMode';
+import { getLogger } from './logger';
+import menuTemplate from './menubar';
+import pkg from '../../package.json';
+import { settingsStore, setupSettingsHandlers } from './settingsStore';
+import { setupBaseUrl } from './setupBaseUrl';
 import setupCheckFilePermissions from './setupCheckFilePermissions';
 import { setupCheckFirstRun } from './setupCheckFirstRun';
 import { setupCheckStorageToken } from './setupCheckStorageToken';
-import {
-  setupInvestRunHandlers,
-  setupInvestLogReaderHandler
-} from './setupInvestHandlers';
+import setupContextMenu from './setupContextMenu';
+import setupDialogs from './setupDialogs';
+import setupDownloadHandlers from './setupDownloadHandlers';
+import setupGetElectronPaths from './setupGetElectronPaths';
 import setupGetNCPUs from './setupGetNCPUs';
+import {
+  setupInvestLogReaderHandler,
+  setupInvestRunHandlers,
+} from './setupInvestHandlers';
+import { setupIsNewVersion } from './setupIsNewVersion';
 import setupOpenExternalUrl from './setupOpenExternalUrl';
 import setupOpenLocalHtml from './setupOpenLocalHtml';
-import { settingsStore, setupSettingsHandlers } from './settingsStore';
-import setupGetElectronPaths from './setupGetElectronPaths';
 import setupRendererLogger from './setupRendererLogger';
-import { ipcMainChannels } from './ipcMainChannels';
-import menuTemplate from './menubar';
-import ELECTRON_DEV_MODE from './isDevMode';
-import BASE_URL from './baseUrl';
-import { getLogger } from './logger';
-import i18n from './i18n/i18n';
-import pkg from '../../package.json';

 const logger = getLogger(__filename.split('/').slice(-1)[0]);
@@ -87,6 +89,7 @@ export const createWindow = async () => {
   setupDialogs();
   setupCheckFilePermissions();
   setupCheckFirstRun();
+  setupIsNewVersion();
   setupCheckStorageToken();
   setupSettingsHandlers();
   setupGetElectronPaths();

@@ -94,6 +97,7 @@ export const createWindow = async () => {
   setupInvestLogReaderHandler();
   setupOpenExternalUrl();
   setupRendererLogger();
+  setupBaseUrl();
   await getFlaskIsReady();

   const devModeArg = ELECTRON_DEV_MODE ? '--devmode' : '';
@@ -0,0 +1,13 @@
+import {
+  ipcMain,
+} from 'electron';
+
+import { ipcMainChannels } from './ipcMainChannels';
+
+import baseUrl from './baseUrl';
+
+export function setupBaseUrl() {
+  ipcMain.handle(
+    ipcMainChannels.BASE_URL, () => baseUrl
+  );
+}
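The handler above only forwards a constant to the renderer over IPC. A minimal sketch of the consuming side, assuming the preload-exposed ipcRenderer that other workbench components use (fetchChangelogUrl is a hypothetical helper, not part of this diff):

// Sketch: resolve the base URL once in the renderer, then build asset
// URLs from it.
const { ipcRenderer } = window.Workbench.electron;

async function fetchChangelogUrl() {
  const baseUrl = await ipcRenderer.invoke(ipcMainChannels.BASE_URL);
  return `${baseUrl}/changelog.html`;
}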
@@ -0,0 +1,52 @@
+import fs from 'fs';
+import path from 'path';
+
+import {
+  app,
+  ipcMain,
+} from 'electron';
+
+import { ipcMainChannels } from './ipcMainChannels';
+import { getLogger } from './logger';
+import pkg from '../../package.json';
+
+const logger = getLogger(__filename.split('/').slice(-1)[0]);
+
+export const APP_VERSION_TOKEN = 'app-version-token.txt';
+
+/** Determine whether this is the first run of the current running version.
+ *
+ * @returns {boolean} true if this version has not run before, otherwise false
+ */
+export async function isNewVersion() {
+  // Getting version from package.json is simplest because there is no need to
+  // spawn an invest process simply to get the version of the installed binary.
+  const currentVersion = pkg.version;
+  const userDataPath = app.getPath('userData');
+  const tokenPath = path.join(userDataPath, APP_VERSION_TOKEN);
+  try {
+    if (fs.existsSync(tokenPath)) {
+      const tokenString = fs.readFileSync(tokenPath, {encoding: 'utf8'});
+      if (tokenString) {
+        const installedVersionList = tokenString.split(',');
+        if (installedVersionList.includes(currentVersion)) {
+          return false;
+        }
+        // If current version not found, add it
+        fs.writeFileSync(tokenPath, `${tokenString},${currentVersion}`);
+        return true;
+      }
+    }
+    // If file does not exist, create it
+    fs.writeFileSync(tokenPath, currentVersion);
+  } catch (error) {
+    logger.warn(`Unable to write app-version token: ${error}`);
+  }
+  return true;
+}
+
+export function setupIsNewVersion() {
+  ipcMain.handle(
+    ipcMainChannels.IS_NEW_VERSION, () => isNewVersion()
+  );
+}
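The token file is a simple comma-separated list of every version that has run. A hypothetical walk-through, with made-up version numbers:

// Fresh install: no token file, so isNewVersion() writes '3.14.0' and
// returns true; the changelog modal shows once.
// Relaunch of 3.14.0: the file contains '3.14.0', so it returns false.
// Upgrade to 3.14.1: the file becomes '3.14.0,3.14.1' and isNewVersion()
// returns true exactly once more, for the new version.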
@@ -24,6 +24,7 @@ import DownloadProgressBar from './components/DownloadProgressBar';
 import { getInvestModelNames } from './server_requests';
 import InvestJob from './InvestJob';
 import { dragOverHandlerNone } from './utils';
+import Changelog from './components/Changelog';

 const { ipcRenderer } = window.Workbench.electron;
@@ -42,6 +43,8 @@ export default class App extends React.Component {
       recentJobs: [],
       showDownloadModal: false,
       downloadedNofN: null,
+      showChangelog: false,
+      changelogDismissed: false,
     };
     this.switchTabs = this.switchTabs.bind(this);
     this.openInvestModel = this.openInvestModel.bind(this);
@@ -65,6 +68,9 @@ export default class App extends React.Component {
           .includes(job.modelRunName)
       )),
       showDownloadModal: this.props.isFirstRun,
+      // Show changelog if this is a new version,
+      // but if it's the first run ever, wait until after download modal closes.
+      showChangelog: this.props.isNewVersion && !this.props.isFirstRun,
     });
     await i18n.changeLanguage(window.Workbench.LANGUAGE);
     ipcRenderer.on('download-status', (downloadedNofN) => {
@@ -92,6 +98,20 @@ export default class App extends React.Component {
     this.setState({
       showDownloadModal: shouldShow,
     });
+    // After close, show changelog if new version and app has just launched
+    // (i.e., show changelog only once, after the first time the download modal closes).
+    if (!shouldShow && this.props.isNewVersion && !this.state.changelogDismissed) {
+      this.setState({
+        showChangelog: true,
+      });
+    }
+  }
+
+  closeChangelogModal() {
+    this.setState({
+      showChangelog: false,
+      changelogDismissed: true,
+    });
   }

 /** Push data for a new InvestTab component to an array.
@@ -183,6 +203,7 @@ export default class App extends React.Component {
       openTabIDs,
       activeTab,
       showDownloadModal,
+      showChangelog,
       downloadedNofN,
     } = this.state;
@@ -277,6 +298,13 @@ export default class App extends React.Component {
          show={showDownloadModal}
          closeModal={() => this.showDownloadModal(false)}
        />
+       {
+         showChangelog &&
+         <Changelog
+           show={showChangelog}
+           close={() => this.closeChangelogModal()}
+         />
+       }
        <TabContainer activeKey={activeTab}>
          <Navbar
            onDragOver={dragOverHandlerNone}
@@ -357,6 +385,7 @@

 App.propTypes = {
   isFirstRun: PropTypes.bool,
+  isNewVersion: PropTypes.bool,
   nCPU: PropTypes.number,
 };
@@ -364,5 +393,6 @@ App.propTypes = {
 // can be undefined for unrelated tests.
 App.defaultProps = {
   isFirstRun: false,
+  isNewVersion: false,
   nCPU: 1,
 };
@@ -0,0 +1,103 @@
+import React, { useEffect, useState } from 'react';
+import PropTypes from 'prop-types';
+import Button from 'react-bootstrap/Button';
+import Modal from 'react-bootstrap/Modal';
+import { MdClose } from 'react-icons/md';
+import { useTranslation } from 'react-i18next';
+
+import pkg from '../../../../package.json';
+import { ipcMainChannels } from '../../../main/ipcMainChannels';
+
+const { ipcRenderer } = window.Workbench.electron;
+const { logger } = window.Workbench;
+
+export default function Changelog(props) {
+  const { t } = useTranslation();
+  const [htmlContent, setHtmlContent] = useState('');
+
+  // Load HTML from external file (which is generated by Python build process).
+  useEffect(() => {
+    async function loadHtml() {
+      const baseUrl = await ipcRenderer.invoke(ipcMainChannels.BASE_URL);
+      const response = await fetch(`${baseUrl}/changelog.html`);
+      if (!response.ok) {
+        logger.debug(`Error fetching changelog HTML: ${response.status} ${response.statusText}`);
+        return;
+      }
+      try {
+        const htmlString = await response.text();
+        // Find the section whose heading explicitly matches the current version.
+        const versionStr = pkg.version;
+        const escapedVersionStr = versionStr.split('.').join('\\.');
+        const sectionRegex = new RegExp(
+          `<section.*?>[\\s]*?<h1>${escapedVersionStr}\\b[\\s\\S]*?</h1>[\\s\\S]*?</section>`
+        );
+        const sectionMatches = htmlString.match(sectionRegex);
+        if (sectionMatches && sectionMatches.length) {
+          let latestVersionSection = sectionMatches[0];
+          const linkRegex = /<a\shref/g;
+          // Ensure all links open in a new window and are styled with a relevant icon.
+          latestVersionSection = latestVersionSection.replaceAll(
+            linkRegex,
+            '<a target="_blank" class="link-external" href'
+          );
+          setHtmlContent({
+            __html: latestVersionSection
+          });
+        }
+      } catch (error) {
+        logger.debug(error);
+      }
+    }
+    loadHtml();
+  }, []);
+
+  // Once HTML content has loaded, set up links to open in browser
+  // (instead of in an Electron window).
+  useEffect(() => {
+    const openLinkInBrowser = (event) => {
+      event.preventDefault();
+      ipcRenderer.send(
+        ipcMainChannels.OPEN_EXTERNAL_URL, event.currentTarget.href
+      );
+    };
+    document.querySelectorAll('.link-external').forEach(link => {
+      link.addEventListener('click', openLinkInBrowser);
+    });
+  }, [htmlContent]);
+
+  return (
+    <Modal
+      show={props.show && htmlContent !== ''}
+      onHide={props.close}
+      size="lg"
+      aria-labelledby="changelog-modal-title"
+    >
+      <Modal.Header>
+        <Modal.Title id="changelog-modal-title">
+          {t('New in this version')}
+        </Modal.Title>
+        <Button
+          variant="secondary-outline"
+          onClick={props.close}
+          className="float-right"
+          aria-label="Close modal"
+        >
+          <MdClose />
+        </Button>
+      </Modal.Header>
+      {/* Setting inner HTML in this way is OK because
+        the HTML content is controlled by our build process
+        and not, for example, sourced from user input. */}
+      <Modal.Body
+        dangerouslySetInnerHTML={htmlContent}
+      >
+      </Modal.Body>
+    </Modal>
+  );
+}
+
+Changelog.propTypes = {
+  show: PropTypes.bool.isRequired,
+  close: PropTypes.func.isRequired,
+};
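A minimal sketch of what the section regex above is meant to match, assuming rst2html5 emits one <section> per release heading; the sample HTML and the current version of 3.14.0 are illustrative only:

// Hypothetical changelog.html fragment and version number.
const sampleHtml = `
  <section id="id1">
    <h1>3.14.0 (2023-12-18)</h1>
    <ul><li>Workbench: added this changelog window.</li></ul>
  </section>`;
const sectionRegex = new RegExp(
  '<section.*?>[\\s]*?<h1>3\\.14\\.0\\b[\\s\\S]*?</h1>[\\s\\S]*?</section>'
);
console.log(sectionRegex.test(sampleHtml)); // true

The \b after the escaped version string keeps, for example, version 3.14.0 from also matching a heading for 3.14.0.post1 only when the next character is a word character, which is why the date in parentheses still matches.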
@@ -174,6 +174,38 @@ class DataDownloadModal extends React.Component {
     // Don't render until registry is loaded, since it loads async
     if (!dataRegistry) { return <div />; }

+    const displayNames = {
+      'Annual Water Yield': t('Annual Water Yield'),
+      'Carbon Storage and Sequestration': t('Carbon Storage and Sequestration'),
+      'Coastal Blue Carbon': t('Coastal Blue Carbon'),
+      'Coastal Vulnerability': t('Coastal Vulnerability'),
+      'Crop Pollination': t('Crop Pollination'),
+      'Crop Production': t('Crop Production'),
+      'DelineateIt': t('DelineateIt'),
+      'Forest Carbon Edge Effect': t('Forest Carbon Edge Effect'),
+      'Habitat Quality': t('Habitat Quality'),
+      'Habitat Risk Assessment': t('Habitat Risk Assessment'),
+      'Nutrient Delivery Ratio': t('Nutrient Delivery Ratio'),
+      'RouteDEM': t('RouteDEM'),
+      'Scenario Generator: Proximity Based': t('Scenario Generator: Proximity Based'),
+      'Scenic Quality': t('Scenic Quality'),
+      'Seasonal Water Yield': t('Seasonal Water Yield'),
+      'Sediment Delivery Ratio': t('Sediment Delivery Ratio'),
+      'Urban Stormwater Retention': t('Urban Stormwater Retention'),
+      'Urban Cooling': t('Urban Cooling'),
+      'Urban Flood Risk Mitigation': t('Urban Flood Risk Mitigation'),
+      'Urban Nature Access': t('Urban Nature Access'),
+      'Visitation: Recreation and Tourism': t('Visitation: Recreation and Tourism'),
+      'Wave Energy Production': t('Wave Energy Production'),
+      'Wind Energy Production': t('Wind Energy Production'),
+      'Global DEM & Landmass Polygon': t('Global DEM & Landmass Polygon'),
+    };
+    const displayNotes = {
+      '(recommended to run model)': t('(recommended to run model)'),
+      '(required to run model)': t('(required to run model)'),
+      '(required for Wind & Wave Energy)': t('(required for Wind & Wave Energy)'),
+    };
+
     const downloadEnabled = Boolean(selectedLinksArray.length);
     const DatasetCheckboxRows = [];
     Object.keys(modelCheckBoxState)
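The maps above spell out every model name as a literal argument to t(), presumably so that static translation-extraction tooling can collect the strings for the message catalogs; calling t(modelName) on a runtime variable could not be extracted that way. A hypothetical hardening for names missing from the map:

// Sketch only: the diff renders displayNames[modelName] directly,
// without this fallback to the untranslated name.
const label = displayNames[modelName] || modelName;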
@@ -196,11 +228,11 @@ class DataDownloadModal extends React.Component {
               )}
             />
             <Form.Check.Label>
-              {modelName}
+              {displayNames[modelName]}
             </Form.Check.Label>
           </Form.Check>
         </td>
-        <td><em>{note}</em></td>
+        <td><em>{displayNotes[note]}</em></td>
         <td>{filesizeStr}</td>
       </tr>
     );
@@ -211,6 +243,7 @@ class DataDownloadModal extends React.Component {
         show={this.props.show}
         onHide={this.closeDialog}
         size="lg"
+        aria-labelledby="download-modal-title"
       >
         <Form>
           <Modal.Header>
@@ -230,7 +263,9 @@ class DataDownloadModal extends React.Component {
                 <p className="mb-0"><em>{this.state.alertPath}</em></p>
               </Alert>
             )
-            : <Modal.Title>{t("Download InVEST sample data")}</Modal.Title>
+            : <Modal.Title id="download-modal-title">
+                {t("Download InVEST sample data")}
+              </Modal.Title>
           }
         </Modal.Header>
         <Modal.Body>
@@ -32,11 +32,8 @@ class OpenButton extends React.Component {
       datastack = await fetchDatastackFromFile({ filepath: data.filePaths[0] });
     } catch (error) {
       logger.error(error);
-      alert(
-        t(
-          'No InVEST model data can be parsed from the file:\n {{filepath}}',
-          { filepath: data.filePaths[0] }
-        )
+      alert( // eslint-disable-line no-alert
+        `${t('No InVEST model data can be parsed from the file:')}\n${data.filePaths[0]}`
       );
       return;
     }
@@ -52,7 +52,7 @@ function FormLabel(props) {
     <Form.Label column sm="3" htmlFor={argkey}>
       <span className="argname">{argname} </span>
       {
         (userFriendlyArgType || optional) &&
         <span>
           (
             {userFriendlyArgType}
@@ -312,15 +312,12 @@ class SetupTab extends React.Component {
           return;
         }
       } else {
         datastack = await fetchDatastackFromFile({ filepath: filepath });
       }
     } catch (error) {
       logger.error(error);
       alert( // eslint-disable-line no-alert
-        t(
-          'No InVEST model data can be parsed from the file:\n {{filepath}}',
-          { filepath: filepath }
-        )
+        `${t('No InVEST model data can be parsed from the file:')}\n${filepath}`
       );
       return;
     }
@@ -1,82 +1,136 @@
{
  "Annual Water Yield": "Producción anual de agua",
  "Carbon Storage and Sequestration": "Almacenamiento y secuestración de carbono",
  "Coastal Blue Carbon": "Carbono azul costero",
  "Coastal Vulnerability": "Vulnerabilidad de las costas",
  "Crop Pollination": "Polinización de cultivos",
  "Crop Production": "Producción de cultivos",
  "DelineateIt": "DelineateIt",
  "Forest Carbon Edge Effect": "Efecto de borde del carbono forestal",
  "Habitat Quality": "Calidad del hábitat",
  "Habitat Risk Assessment": "Evaluación de riesgo de hábitats",
  "Nutrient Delivery Ratio": "Tasa de entrega de nutrientes",
  "RouteDEM": "RouteDEM",
  "Scenario Generator: Proximity Based": "Generador de Escenarios: Basado en Proximidad",
  "Scenic Quality": "Calidad escénica",
  "Seasonal Water Yield": "Producción de agua estacional",
  "Sediment Delivery Ratio": "Tasa de entrega de sedimentos",
  "Urban Stormwater Retention": "Retención urbana de agua de tormenta",
  "Urban Cooling": "Enfriamiento urbano",
  "Urban Flood Risk Mitigation": "Mitigación de riesgos de inundaciones urbanas",
  "Urban Nature Access": "Acceso urbano a la naturaleza",
  "Visitation: Recreation and Tourism": "Visitación: recreación y turismo",
  "Wave Energy Production": "Producción de energía de olas",
  "Wind Energy Production": "Producción de energía eólica",
  "Global DEM & Landmass Polygon": "MED Global y Polígono de Masa Terrestre",
  "(recommended to run model)": "(se recomienda para que corra el modelo)",
  "(required to run model)": "(se requiere para que corra el modelo)",
  "(required for Wind & Wave Energy)": "(se requiere para Energía Eólica y de Olas)",
  "Please choose a different folder. This application does not have permission to write to folder:": "Por favor, seleccione una carpeta diferente. Esta aplicación no tiene permiso para escribir en la carpeta:",
  "Download InVEST sample data": "Descargar datos de muestra de InVEST",
  "Cancel": "Cancelar",
  "Download": "Descargar",
  "Download Failed": "Falló la descarga",
  "Download Complete": "Descarga completa",
  "Downloading {{number}} of {{nTotal}}": "Descargando {{number}} de {{nTotal}}",
  "Something went wrong": "Algo salió mal",
  "Please help us fix this by reporting the problem.You may follow these steps:": "Por favor, ayúdenos a reparar esto informando sobre el problema. Puede seguir estos pasos:",
  "Find the Workbench log files using the button below.": "Halle los archivos de registro Workbench usando el botón de abajo.",
  "There may be multiple files with a \".log\" extension.": "Puede haber varios archivos con una extensión \".log\".",
  "Find My Logs": "Encontrar mis registros",
  "Create a post on our forum ": "Genere un mensaje en nuestro foro ",
  "and upload all the log files, along with a brief description of what happened before you saw this message.": "y suba todos los archivos de registro, junto con una breve descripción de lo que ocurrió antes de ver este mensaje.",
  "Model Complete": "Modelo completo",
  "Recent runs:": "Corridas recientes:",
  "Set up a model from a sample datastack file (.json) or from an InVEST model's logfile (.txt): ": "Configure un modelo a partir de un archivo Datastack de ejemplo (.json) o del archivo de registro de un modelo InVEST (.txt) ",
  "Setup": "Configuración",
  "Log": "Registro",
  "Open Workspace": "Abrir el espacio de trabajo",
  "Cancel Run": "Cancelar la ejecución",
  "Error: see log for details": "Error: ver el registro para más detalles",
  "Run Canceled": "Ejecución cancelada",
  "No InVEST model data can be parsed from the file:": "No hay datos de modelos InVEST que puedan ser analizados desde el archivo:",
  "Browse to a datastack (.json) or InVEST logfile (.txt)": "Buscar una pila de datos (.json) o un archivo de registro InVEST (.txt)",
  "Open": "Abrir",
  "User's Guide": "Guía de uso",
  "Frequently Asked Questions": "Preguntas frecuentes",
  "Save as...": "Guardar como...",
  "Datastack options": "Opciones de Datastack",
  "Save your parameters in a JSON file. This includes the paths to your input data, but not the data itself. Open this file in InVEST to restore your parameters.": "Guarde sus parámetros en un archivo JSON. Esto incluye las rutas a los datos de entrada, pero no los datos en sí. Abra este archivo en InVEST para restaurar sus parámetros.",
  "Save your parameters and input data in a compressed archive. This archive contains the same JSON file produced by the \"Parameters only\" option, plus the data. You can open this file in InVEST to restore your parameters. This option is useful to copy all the necessary data to a different location.": "Guarde sus parámetros y datos de entrada en un archivo comprimido. Este archivo contiene el mismo archivo JSON producido por la opción \"Sólo parámetros\", además de los datos. Puede abrir este archivo en InVEST para restaurar sus parámetros. Esta opción es útil para copiar todos los datos necesarios en una ubicación diferente.",
  "Save your parameters in a python script. This includes the paths to your input data, but not the data itself. Running the python script will programmatically run the model with your parameters. Use this as a starting point for batch scripts.": "Guarde sus parámetros en un script de Python. Esto incluye las rutas a los datos de entrada, pero no los datos en sí. Al ejecutar el script de Python, el modelo se ejecutará mediante programación con sus parámetros. Utilícelo como punto de partida para scripts por lotes.",
  "Save": "Guardar",
  "Synchronous": "Sincrónico",
  "Threaded task management": "Manejo encadenado de tareas",
  "CPUs": "CPUs",
  "DEBUG": "DEPURAR",
  "INFO": "INFORMACIÓN",
  "WARNING": "ADVERTENCIA",
  "ERROR": "ERROR",
  "InVEST Settings": "Configuración de InVEST",
  "Language": "Idioma",
  "Logging threshold": "Umbral de registro",
  "Taskgraph logging threshold": "Umbral de registro del gráfico de tareas",
  "more info": "más información",
  "synchronous task execution is most reliable": "la ejecución sincrónica de tareas es la más fiable",
  "threaded task management: tasks execute only in the main process, using multiple threads.": "gestión de tareas por hilos: las tareas se ejecutan solo en el proceso principal, utilizando varios hilos.",
  "n CPUs: depending on the InVEST model, tasks may execute in parallel using up to this many processes.": "n CPUs: dependiendo del modelo de InVEST, las tareas pueden ejecutarse en paralelo utilizando hasta este número de procesos.",
  "Download Sample Data": "Descargar datos de muestra",
  "Clear Recent Jobs": "Borrar trabajos recientes",
  "no invest workspaces will be deleted": "no se eliminará ningún espacio de trabajo de InVEST",
  "Warning": "Advertencia",
  "Changing this setting will close your tabs and relaunch the app.": "Cambiar esta opción cerrará las pestañas y relanzará la aplicación.",
  "Change to ": "Cambiar a ",
  "Not all of the spatial layers overlap each other. Bounding box:": "No todas las capas espaciales se traslapan entre ellas. Caja delimitadora:",
  "optional": "opcional",
  "boolean": "booleano",
  "integer": "entero",
  "csv": "csv",
  "directory": "directorio",
  "file": "archivo",
  "number": "número",
  "percent": "porcentaje",
  "raster": "ráster",
  "ratio": "tasa",
  "vector": "vector",
  "text": "texto",
  "percent: a number from 0 - 100": "porcentaje: un número de 0 a 100",
  "ratio: a decimal from 0 - 1": "tasa: un decimal de 0 - 1",
  "User's guide entry": "Entrada de la guía del usuario",
  "Only drop one file at a time.": "Solo se puede soltar un archivo a la vez.",
  "Choose location to extract archive": "Seleccione la ubicación donde extraer el archivo",
  "Datastack/Logfile for {{modelName}} does not match this model.": "Datastack/Logfile para {{modelName}} no calza con este modelo.",
  "Running": "En curso",
  "Run": "Ejecutar",
  "Browse to a datastack (.json, .tgz) or InVEST logfile (.txt)": "Buscar una pila de datos (.json, .tgz) o un archivo de registro InVEST (.txt)",
  "Load parameters from file": "Cargar los parámetros desde un archivo",
  "No args to see here": "No hay argumentos que ver aquí",
  "version:": "versión:",
  "Copyright 2023, The Natural Capital Project": "Copyright 2023, The Natural Capital Project",
  "Documentation": "Documentación",
  "Homepage": "Página web",
  "Project page": "Página del proyecto",
  "License": "Licencia",
  "InVEST Trademark and Logo Use Policy": "Política de uso de la marca y el logotipo de InVEST",
  "Open-Source Licenses:": "Licencias de código abierto:",
  "MIT and others": "MIT y otros",
  "Please help us by reporting problems.": "Por favor, ayúdenos informando sobre problemas.",
  "If the problem is related to a specific InVEST model, ": "Si el problema se relaciona con un modelo InVEST específico, ",
  "please see the guidelines here for reporting problems: ": "por favor, refiérase a la guía para informar sobre problemas aquí: ",
  "Guidelines for posting software support questions": "Guía para publicar preguntas de apoyo sobre software",
  "If the problem is related to this User Interface, ": "Si el problema se relaciona con esta interfaz de uso, ",
  "rather than with a specific InVEST model,": "más que con un modelo InVEST específico,",
  "Consider taking a screenshot of the problem.": "Considere hacer una captura de pantalla del problema.",
  "Find the log files using the button below. There may be multiple files with a \".log\" extension; please include them all.": "Halle los archivos de registro usando el botón más abajo. Podría haber varios archivos con una extensión \".log\"; por favor, incluya todos ellos.",
  "Create a post on our forum and upload these items, along with a brief description of the problem.": "Cree un posteo en nuestro foro y cargue estos ítemes junto a una breve descripción del problema.",
  "Save your parameters in a JSON file. This includes thepaths to your input data, but not the data itself. Open this file in InVEST to restore your parameters.": "Guarde sus parámetros en un archivo JSON. Esto incluye las rutas a sus datos de entrada, pero no los datos en sí. Abra este archivo en InVEST para reestablecer sus parámetros.",
  "Reset to Defaults": "Valores predeterminados",
  "Bounding box does not intersect at least one other:": "La caja delimitadora no se interseca al menos con otra:",
  "InVEST": "InVEST",
  "Find the Workbench log files ": "Buscar los archivos de registro de Workbench ",
  "Please help us fix this by reporting the problem. You may follow these steps:": "Por favor, ayúdenos a solucionar esto informando del problema. Puede seguir estos pasos:",
  "using the button below. There may be multiple files with a \".log\" extension.": "con el botón de abajo. Puede haber varios archivos con extensión \".log\".",
  "Taskgraph n_workers parameter": "Parámetro n_workers del gráfico de tareas",
  "freestyle_string": "freestyle",
  "option_string": "option"
}
@@ -1,103 +1,147 @@
{
  "Annual Water Yield": "年出水量",
  "Carbon Storage and Sequestration": "碳储存和碳封存",
  "Coastal Blue Carbon": "海岸蓝碳",
  "Coastal Vulnerability": "沿海地区的脆弱性",
  "Crop Pollination": "作物授粉",
  "Crop Production": "农作物生产",
  "DelineateIt": "划线",
  "Forest Carbon Edge Effect": "森林碳边效应",
  "Habitat Quality": "生境质量",
  "Habitat Risk Assessment": "生境风险评估",
  "Nutrient Delivery Ratio": "营养输送比",
  "RouteDEM": "路线高程",
  "Scenario Generator: Proximity Based": "情景生成器:基于邻近性",
  "Scenic Quality": "风景质量",
  "Seasonal Water Yield": "季节产水量",
  "Sediment Delivery Ratio": "沉积物输送比",
  "Urban Stormwater Retention": "城市雨水滞留",
  "Urban Cooling": "城市降温",
  "Urban Flood Risk Mitigation": "城市洪水风险缓解",
  "Urban Nature Access": "城市自然通道",
  "Visitation: Recreation and Tourism": "参观:娱乐和旅游",
  "Wave Energy Production": "波浪能生产",
  "Wind Energy Production": "风能生产",
  "Global DEM & Landmass Polygon": "全球 高程 和地块多边形",
  "(recommended to run model)": "(建议运行模型)",
  "(required to run model)": "(运行模型所需)",
  "(required for Wind & Wave Energy)": "(风能和波浪能所需)",
  "Please choose a different folder. This application does not have permission to write to folder:": "请选择其他文件夹。此应用程序没有写入文件夹的权限:",
  "Download InVEST sample data": "下载InVEST样例数据",
  "Cancel": "取消",
  "Download": "下载",
  "Download Failed": "下载失败",
  "Download Complete": "下载完整版",
  "Downloading {{number}} of {{nTotal}}": "下载{{nTotal}}个文件中的第{{number}}个",
  "Something went wrong": "出了些问题",
  "Please help us fix this by reporting the problem.You may follow these steps:": "请通过报告这个问题来帮助我们解决这个问题。你可以按照以下步骤进行:",
  "Find the Workbench log files using the button below.": "请使用下面的按钮来查看工作台的日志文件。",
  "There may be multiple files with a \".log\" extension.": "可能有多个以\".log \"为扩展名的文件。",
  "Find My Logs": "寻找我的日志文件",
  "Create a post on our forum ": "在我们的论坛上创建一个帖子",
  "and upload all the log files, along with a brief description of what happened before you saw this message.": "并上传所有的日志文件。请同时简要描述在你看到这条信息之前发生的状况。",
  "Model Complete": "模型运行完成",
  "Recent runs:": "最近的运行:",
  "Set up a model from a sample datastack file (.json) or from an InVEST model's logfile (.txt): ": "从示例数据堆栈文件 (.json) 或 InVEST 模型的日志文件 (.txt) 设置模型:",
  "Setup": "设置",
  "Log": "日志",
  "Open Workspace": "打开工作空间",
  "Cancel Run": "取消运行",
  "Error: see log for details": "错误:详见日志",
  "Run Canceled": "取消运行",
  "No InVEST model data can be parsed from the file:": "无法从文件中解析 InVEST 模型数据:",
  "Browse to a datastack (.json) or InVEST logfile (.txt)": "浏览到一个数据包(.json)或InVEST日志文件(.txt)。",
  "Open": "打开",
  "User's Guide": "用户指南",
  "Frequently Asked Questions": "常见问题",
  "Save as...": "另存为…",
  "Datastack options": "数据堆栈选项",
  "Save your parameters in a JSON file. This includes the paths to your input data, but not the data itself. Open this file in InVEST to restore your parameters.": "将参数保存在 JSON 文件中。 这包括输入数据的路径,但不包括数据本身。 在 InVEST 中打开此文件以恢复您的参数。",
  "Save your parameters and input data in a compressed archive. This archive contains the same JSON file produced by the \"Parameters only\" option, plus the data. You can open this file in InVEST to restore your parameters. This option is useful to copy all the necessary data to a different location.": "将您的参数和输入数据保存在压缩存档中。 此压缩存档包含由“仅参数”选项生成的相同 JSON 文件以及数据。 您可以在 InVEST 模型中打开此文件以恢复您的参数。 此选项有助于将所有必要数据复制到不同位置。",
  "Save your parameters in a python script. This includes the paths to your input data, but not the data itself. Running the python script will programmatically run the model with your parameters. Use this as a starting point for batch scripts.": "将参数保存在 python 脚本中。 这包括输入数据的路径,但不包括数据本身。 运行 python 脚本将以编程方式使用您的参数运行模型。 您可以基于此用脚本进行批处理。",
  "Save": "保存",
  "Synchronous": "同步",
  "Threaded task management": "线程任务管理",
  "CPUs": "处理器",
  "DEBUG": "调试",
  "INFO": "信息",
  "WARNING": "警告",
  "ERROR": "错误",
  "InVEST Settings": "InVEST设置",
  "Language": "语言",
  "Logging threshold": "日志阈值",
  "Taskgraph logging threshold": "任务图记录阈值",
  "more info": "更多信息",
  "synchronous task execution is most reliable": "同步任务的执行是最可靠的",
  "threaded task management: tasks execute only in the main process, using multiple threads.": "线程任务管理:任务只在主进程中使用多线程执行。",
  "n CPUs: depending on the InVEST model, tasks may execute in parallel using up to this many processes.": "n CPU:根据 InVEST 模型,任务最多可以在这么多个进程下执行任务。",
  "Download Sample Data": "下载样例数据",
  "Clear Recent Jobs": "清除最近的工作",
  "no invest workspaces will be deleted": "没有invest的工作空间将被删除",
  "Warning": "警告",
  "Changing this setting will close your tabs and relaunch the app.": "更改此设置将关闭标签页并重新启动应用程序。",
  "Change to ": "更改到",
  "Not all of the spatial layers overlap each other. Bounding box:": "并非所有空间层都相互重叠。边界框:",
  "optional": "可选项",
  "boolean": "布尔值",
  "integer": "整数",
  "csv": "csv",
  "directory": "模型数据目录",
  "file": "文件后缀",
  "number": "数量",
  "percent": "百分比",
  "raster": "栅格",
  "ratio": "比例",
  "vector": "向量",
  "text": "文本",
  "percent: a number from 0 - 100": "百分比:一个从0-100的数字",
  "ratio: a decimal from 0 - 1": "比率:一个从0-1的小数",
  "User's guide entry": "用户指南条目",
  "Only drop one file at a time.": "一次只能丢一个文件。",
  "Choose location to extract archive": "选择提取存档的位置",
  "Datastack/Logfile for {{modelName}} does not match this model.": "{{modelName}} 个数据堆栈/日志文件与此模型不匹配。",
  "Running": "运行中",
  "Run": "运行",
  "Browse to a datastack (.json, .tgz) or InVEST logfile (.txt)": "浏览到数据包(.json, .tgz)或InVEST日志文件(.txt)。",
  "Load parameters from file": "从文件中加载参数",
  "No args to see here": "在args dict中缺少关键信息",
  "version:": "版本:",
  "Copyright 2023, The Natural Capital Project": "版权所有 2023 年,自然资本项目",
  "Documentation": "文档",
  "Homepage": "首页",
  "Project page": "项目页面",
  "License": "许可证",
  "InVEST Trademark and Logo Use Policy": "InVEST 商标和徽标使用政策",
  "Open-Source Licenses:": "开源许可证:",
  "MIT and others": "MIT许可证及其他许可证",
  "Please help us by reporting problems.": "请通过报告问题来帮助我们。",
  "If the problem is related to a specific InVEST model, ": "如果这个问题与特定的 InVEST 模型有关,",
  "please see the guidelines here for reporting problems: ": "请查看此指南,然后上报问题:",
  "Guidelines for posting software support questions": "发布软件支持问题的指南",
  "If the problem is related to this User Interface, ": "如果问题与此用户界面有关,",
  "rather than with a specific InVEST model,": "而不是与使用某个特定的 InVEST 模型有关,",
  "Consider taking a screenshot of the problem.": "请截屏保存以便随后发布帖子寻求帮助。",
  "Find the log files using the button below. There may be multiple files with a \".log\" extension; please include them all.": "使用下面的按钮查找日志文件。 可能有多个扩展名为“.log”的文件; 请把它们全部包括在内。",
  "Create a post on our forum and upload these items, along with a brief description of the problem.": "在我们的论坛上创建一个帖子并上传这些项目以及问题的简要说明。",
  "Changing this setting will refresh the app and close all tabs": "更改此设置将刷新应用程序并关闭所有选项卡",
  "Reset to Defaults": "重置为默认值",
  "Bounding box does not intersect at least one other:": "边界框至少不与另一个相交:",
  "Copyright 2022, The Natural Capital Project": "版权所有 2022, The Natural Capital Project",
  "Documentation: ": "文档:",
  "Homepage: ": "主页:",
  "Project page: ": "项目页面:",
  "License: ": "许可证:",
  "Export all input data to a compressed archive": "将所有输入数据导出到压缩档案中",
  "InVEST": "InVEST",
  "Save datastack": "保存数据包",
  "Save model setup to a JSON file": "将模型设置保存到一个JSON文件中",
  "Save model setup to a Python script": "将模型设置保存到Python脚本中",
  "Save to JSON": "保存到JSON",
  "Save to Python script": "保存到Python脚本",
  "true/false": "真/假",
  "CSV": "CSV",
  "option": "选项",
  "Taskgraph n_workers parameter": "任务图n_workers参数",
  "freestyle_string": "自由式",
  "option_string": "选项"
}
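Keys such as "Downloading {{number}} of {{nTotal}}" in both files use i18next interpolation: the placeholders survive translation verbatim and are substituted at call time. A minimal sketch with made-up counts:

// i18next replaces the named placeholders from the options object.
const msg = t('Downloading {{number}} of {{nTotal}}', { number: 3, nTotal: 12 });
// es: 'Descargando 3 de 12'; zh: '下载12个文件中的第3个'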
@@ -9,6 +9,7 @@ import { ipcMainChannels } from '../main/ipcMainChannels';
 const { ipcRenderer } = window.Workbench.electron;

 const isFirstRun = await ipcRenderer.invoke(ipcMainChannels.IS_FIRST_RUN);
+const isNewVersion = await ipcRenderer.invoke(ipcMainChannels.IS_NEW_VERSION);
 const nCPU = await ipcRenderer.invoke(ipcMainChannels.GET_N_CPUS);

 const root = createRoot(document.getElementById('App'));
@@ -16,6 +17,7 @@ root.render(
   <ErrorBoundary>
     <App
       isFirstRun={isFirstRun}
+      isNewVersion={isNewVersion}
      nCPU={nCPU}
    />
  </ErrorBoundary>
@@ -253,7 +253,7 @@ exceed 100% of window.*/
 }

 .recent-job-card {
   width: inherit;
   margin-bottom: 1rem;
   padding: 0;
   height: fit-content;
@@ -617,3 +617,18 @@ input[type=text]::placeholder {
 .error-boundary .btn {
   margin: 1rem;
 }
+
+/* Changelog modal */
+.link-external {
+  &:after {
+    content: '';
+    display: inline-block;
+    width: 1rem;
+    height: 1rem;
+    /* Icon is react-icons/md/MdOpenInNew, as a URL-encoded SVG */
+    background-image: url("data:image/svg+xml,%3Csvg stroke='currentColor' fill='currentColor' stroke-width='0' viewBox='0 0 24 24' class='mr-1' height='1em' width='1em' xmlns='http://www.w3.org/2000/svg'%3E%3Cpath fill='none' d='M0 0h24v24H0z'%3E%3C/path%3E%3Cpath d='M19 19H5V5h7V3H5a2 2 0 00-2 2v14a2 2 0 002 2h14c1.1 0 2-.9 2-2v-7h-2v7zM14 3v2h3.59l-9.83 9.83 1.41 1.41L19 6.41V10h2V3h-7z'%3E%3C/path%3E%3C/svg%3E");
+    background-repeat: no-repeat;
+    background-position: left 0.125rem top 0.125rem;
+    background-size: 1rem 1rem;
+  }
+}
@@ -13,6 +13,7 @@ import puppeteer from 'puppeteer-core';

 import pkg from '../../package.json';
 import { APP_HAS_RUN_TOKEN } from '../../src/main/setupCheckFirstRun';
+import { APP_VERSION_TOKEN } from '../../src/main/setupIsNewVersion';

 jest.setTimeout(240000);
 const PORT = 9009;
@@ -26,9 +27,10 @@ let BINARY_PATH;
 // append to this prefix and the image will be uploaded to github artifacts
 // E.g. page.screenshot({ path: `${SCREENSHOT_PREFIX}screenshot.png` })
 let SCREENSHOT_PREFIX;
-// We'll clear this token before launching the app so we can have a
+// We'll clear these tokens before launching the app so we can have a
 // predictable startup page.
 let APP_HAS_RUN_TOKEN_PATH;
+let APP_VERSION_TOKEN_PATH;

 // On GHA macos, invest validation can time-out reading from os.tmpdir
 // So on GHA, use the homedir instead.
@@ -45,6 +47,9 @@ if (process.platform === 'darwin') {
   APP_HAS_RUN_TOKEN_PATH = path.join(
     os.homedir(), 'Library/Application Support', pkg.name, APP_HAS_RUN_TOKEN
   );
+  APP_VERSION_TOKEN_PATH = path.join(
+    os.homedir(), 'Library/Application Support', pkg.name, APP_VERSION_TOKEN
+  );
 } else if (process.platform === 'win32') {
   [BINARY_PATH] = glob.sync('./dist/win-unpacked/InVEST*.exe');
   SCREENSHOT_PREFIX = path.join(
@@ -53,6 +58,9 @@ if (process.platform === 'darwin') {
   APP_HAS_RUN_TOKEN_PATH = path.join(
     os.homedir(), 'AppData/Roaming', pkg.name, APP_HAS_RUN_TOKEN
   );
+  APP_VERSION_TOKEN_PATH = path.join(
+    os.homedir(), 'AppData/Roaming', pkg.name, APP_VERSION_TOKEN
+  );
 }

 if (!fs.existsSync(BINARY_PATH)) {
@@ -97,6 +105,7 @@ afterAll(() => {
 // https://github.com/facebook/jest/issues/8688
 beforeEach(() => {
   try { fs.unlinkSync(APP_HAS_RUN_TOKEN_PATH); } catch {}
+  try { fs.unlinkSync(APP_VERSION_TOKEN_PATH); } catch {}
   // start the invest app and forward stderr to console
   ELECTRON_PROCESS = spawn(
     `"${BINARY_PATH}"`,
@@ -164,11 +173,22 @@ test('Run a real invest model', async () => {
   });
   await page.screenshot({ path: `${SCREENSHOT_PREFIX}1-page-load.png` });

-  const downloadModal = await page.waitForSelector('.modal-dialog');
+  const downloadModal = await page.waitForSelector(
+    'aria/[name="Download InVEST sample data"][role="dialog"]'
+  );
   const downloadModalCancel = await downloadModal.waitForSelector(
     'aria/[name="Cancel"][role="button"]');
   await page.waitForTimeout(WAIT_TO_CLICK); // waiting for click handler to be ready
   await downloadModalCancel.click();

+  const changelogModal = await page.waitForSelector(
+    'aria/[name="New in this version"][role="dialog"]'
+  );
+  const changelogModalClose = await changelogModal.waitForSelector(
+    'aria/[name="Close modal"][role="button"]');
+  await page.waitForTimeout(WAIT_TO_CLICK); // waiting for click handler to be ready
+  await changelogModalClose.click();
+
   // We need to get the modelButton from w/in this list-group because there
   // are buttons with the same name in the Recent Jobs container.
   const investModels = await page.waitForSelector('.invest-list-group');
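Both hunks in this test file swap the generic '.modal-dialog' CSS selector for Puppeteer's built-in 'aria/' query handler, which resolves selectors against the accessibility tree: [name="..."] matches the computed accessible name and [role="..."] the ARIA role. With two modals now appearing at launch (sample-data download and changelog), a class selector would be ambiguous, while the accessible name pins down exactly which dialog is meant. The pattern, as used above:

    // 'aria/' selectors match by accessible name and role, so they are
    // robust to markup changes and unambiguous when multiple modals exist.
    const dialog = await page.waitForSelector(
      'aria/[name="New in this version"][role="dialog"]'
    );
    const closeButton = await dialog.waitForSelector(
      'aria/[name="Close modal"][role="button"]'
    );
    await closeButton.click();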
@@ -233,12 +253,23 @@ test('Check local userguide links', async () => {
   page.on('error', (err) => {
     console.log(err);
   });
-  const downloadModal = await page.waitForSelector('.modal-dialog');
+  const downloadModal = await page.waitForSelector(
+    'aria/[name="Download InVEST sample data"][role="dialog"]'
+  );
   const downloadModalCancel = await downloadModal.waitForSelector(
     'aria/[name="Cancel"][role="button"]');
   await page.waitForTimeout(WAIT_TO_CLICK); // waiting for click handler to be ready
   await downloadModalCancel.click();

+  const changelogModal = await page.waitForSelector(
+    'aria/[name="New in this version"][role="dialog"]'
+  );
+  const changelogModalClose = await changelogModal.waitForSelector(
+    'aria/[name="Close modal"][role="button"]');
+  await page.waitForTimeout(WAIT_TO_CLICK); // waiting for click handler to be ready
+  await changelogModalClose.click();
+
   const investList = await page.waitForSelector('.invest-list-group');
   const modelButtons = await investList.$$('aria/[role="button"]');

@@ -180,12 +180,14 @@ describe('createWindow', () => {
   test('should register various ipcMain listeners', async () => {
     await createWindow();
     const expectedHandleChannels = [
+      ipcMainChannels.BASE_URL,
       ipcMainChannels.CHANGE_LANGUAGE,
       ipcMainChannels.CHECK_STORAGE_TOKEN,
       ipcMainChannels.CHECK_FILE_PERMISSIONS,
       ipcMainChannels.GET_SETTING,
       ipcMainChannels.GET_N_CPUS,
       ipcMainChannels.INVEST_VERSION,
+      ipcMainChannels.IS_NEW_VERSION,
       ipcMainChannels.IS_FIRST_RUN,
       ipcMainChannels.OPEN_PATH,
       ipcMainChannels.SHOW_OPEN_DIALOG,
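The two new entries mean createWindow() is now expected to register ipcMain.handle listeners for BASE_URL and IS_NEW_VERSION. A hypothetical sketch of the main-process side this test asserts against — the setup function, import path, and value sources are assumptions, not shown in this diff:

    // Hypothetical registration; ipcMain.handle(channel, fn) answers
    // ipcRenderer.invoke(channel) calls from the renderer.
    import { ipcMain } from 'electron';
    import { ipcMainChannels } from './ipcMainChannels'; // path assumed

    function setupVersionHandlers(baseUrl, isNewVersion) {
      ipcMain.handle(ipcMainChannels.BASE_URL, () => baseUrl);
      ipcMain.handle(ipcMainChannels.IS_NEW_VERSION, () => isNewVersion);
    }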
@@ -0,0 +1,77 @@
+import React from 'react';
+import '@testing-library/jest-dom';
+import { render } from '@testing-library/react';
+import userEvent from '@testing-library/user-event';
+
+import App from '../../src/renderer/app';
+import pkg from '../../package.json';
+
+describe('Changelog', () => {
+  const currentVersion = pkg.version;
+  const nonexistentVersion = 'nonexistent-version';
+  beforeEach(() => {
+    jest.spyOn(window, 'fetch')
+      .mockResolvedValue({
+        ok: true,
+        text: () => `
+          <html>
+            <head></head>
+            <body>
+              <section>
+                <h1>${currentVersion}</h1>
+              </section>
+              <section>
+                <h1>${nonexistentVersion}</h1>
+              </section>
+            </body>
+          </html>
+        `
+      });
+  });
+
+  test('Changelog modal opens immediately on launch of a new version', async () => {
+    const { findByRole } = render(<App isNewVersion />);
+    const changelogModal = await findByRole('dialog', { name: 'New in this version' });
+    expect(changelogModal).toBeInTheDocument();
+  });
+
+  test('On first run (of any version), Changelog modal opens after Download modal is closed', async () => {
+    const { findByRole, getByText } = render(<App isFirstRun isNewVersion />);
+
+    let changelogModalFound = true;
+    try {
+      await findByRole('dialog', { name: 'New in this version' });
+    } catch {
+      changelogModalFound = false;
+    }
+    expect(changelogModalFound).toBe(false);
+
+    const downloadModal = await findByRole('dialog', { name: 'Download InVEST sample data' });
+    expect(downloadModal).toBeInTheDocument();
+
+    await userEvent.click(getByText('Cancel'));
+    expect(downloadModal).not.toBeInTheDocument();
+    const changelogModal = await findByRole('dialog', { name: 'New in this version' });
+    expect(changelogModal).toBeInTheDocument();
+  });
+
+  test('Changelog modal does not open when current version has been run before', async () => {
+    const { findByRole } = render(<App isNewVersion={false} />);
+    let changelogModalFound = true;
+    try {
+      await findByRole('dialog', { name: 'New in this version' });
+    } catch {
+      changelogModalFound = false;
+    }
+    expect(changelogModalFound).toBe(false);
+  });
+
+  test('Changelog modal contains only content relevant to the current version', async () => {
+    const { findByRole, queryByRole } = render(<App isNewVersion />);
+    const currentVersionSectionHeading = await findByRole('heading', { name: currentVersion });
+    expect(currentVersionSectionHeading).toBeInTheDocument();
+
+    const nonexistentVersionSectionHeading = queryByRole('heading', { name: nonexistentVersion });
+    expect(nonexistentVersionSectionHeading).not.toBeInTheDocument();
+  });
+});
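This new test file mocks window.fetch to return a changelog.html with one <section> per release, then asserts that only the running version's section is rendered. A hypothetical sketch of the renderer logic these tests exercise — the function name and DOM handling are assumed for illustration:

    // Hypothetical: fetch changelog.html and keep only the <section>
    // whose <h1> matches the running version.
    async function loadChangelog(url, version) {
      const response = await fetch(url);
      if (!response.ok) { return ''; }
      const doc = new DOMParser().parseFromString(
        await response.text(), 'text/html');
      for (const section of doc.querySelectorAll('section')) {
        const heading = section.querySelector('h1');
        if (heading && heading.textContent.trim() === version) {
          return section.outerHTML; // only this version's notes
        }
      }
      return '';
    }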
@@ -24,6 +24,7 @@ export default defineConfig({
         path.resolve(PROJECT_ROOT, 'splash.html'),
         path.resolve(PROJECT_ROOT, 'report_a_problem.html'),
         path.resolve(PROJECT_ROOT, 'about.html'),
+        path.resolve(PROJECT_ROOT, 'changelog.html'),
       ],
     },
     emptyOutDir: true,
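Each path under build.rollupOptions.input becomes its own HTML entry point in Vite's multi-page build, so the new changelog.html is emitted alongside the existing pages and can be fetched or opened by the Workbench at runtime. A minimal standalone sketch of such a config, with PROJECT_ROOT resolution assumed rather than copied from this repo:

    // Minimal multi-page Vite config sketch; PROJECT_ROOT is an assumption.
    import path from 'path';
    import { fileURLToPath } from 'url';
    import { defineConfig } from 'vite';

    const PROJECT_ROOT = path.dirname(fileURLToPath(import.meta.url));

    export default defineConfig({
      build: {
        rollupOptions: {
          input: [
            path.resolve(PROJECT_ROOT, 'index.html'),
            path.resolve(PROJECT_ROOT, 'changelog.html'), // new entry point
          ],
        },
        emptyOutDir: true,
      },
    });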